diff --git a/analysis/data_stat.py b/analysis/data_stat.py
index 04a7d87..c1ee542 100644
--- a/analysis/data_stat.py
+++ b/analysis/data_stat.py
@@ -19,7 +19,6 @@ class Stat(Enum):
     MATRIX_ROWS = 'matrix rows'
     MATRIX_SIZE = 'matrix size'
     MATRIX_NNZ = 'matrix nnz'
-    MATRIX_DENSITY_GROUP = 'matrix density group'
     MATRIX_DENSITY = 'matrix density'
 
     #POWER_BEFORE = 'power before'
diff --git a/pytorch/batch.py b/pytorch/batch.py
index 187f1bd..60d9a63 100644
--- a/pytorch/batch.py
+++ b/pytorch/batch.py
@@ -132,15 +132,20 @@ for i, parameter in parameter_list:
     synthetic_size = args.synthetic_size
     synthetic_density = args.synthetic_density
 
-    output_filename_list = [
-        args.cpu.name.lower(),
+    output_filename_list = [args.cpu.name.lower()]
+    if args.cores is not None:
+        output_filename_list += [str(args.cores)]
+    else:
+        output_filename_list += ['max']
+    output_filename_list += [
+        args.format.name.lower(),
         str(args.min_time_s),
         str(args.baseline_time_s),
         str(args.baseline_delay_s)]
 
     if args.matrix_type == MatrixType.SUITESPARSE:
         output_filename_list += [os.path.splitext(os.path.basename(parameter))[0]]
     elif args.matrix_type == MatrixType.SYNTHETIC:
-        output_filename_list += [str(parameter[0]), str(parameter[1])]
+        output_filename_list += ['synthetic', str(parameter[0]), str(parameter[1])]
 
     output_filename = '_'.join(output_filename_list)
diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_0.0001.json b/pytorch/output_1core_after_test/altra_10_10_10_100000_0.0001.json
deleted file mode 100644
index 34ff84b..0000000
--- a/pytorch/output_1core_after_test/altra_10_10_10_100000_0.0001.json
+++ /dev/null
@@ -1 +0,0 @@
-{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4372, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999958, "MATRIX_DENSITY": 9.99958e-05, "TIME_S": 10.330792903900146, "TIME_S_1KI": 2.362944397049439, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 381.8866044712067, "W": 35.91605080170059, "J_1KI": 87.34826268783318, "W_1KI": 8.215016194350547, "W_D": 17.618050801700594, "J_D": 187.32843527841572, "W_D_1KI": 4.029746294990987, "J_D_1KI": 0.9217169018735102}
diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_0.0001.output b/pytorch/output_1core_after_test/altra_10_10_10_100000_0.0001.output
deleted file mode 100644
index 3e08738..0000000
--- a/pytorch/output_1core_after_test/altra_10_10_10_100000_0.0001.output
+++ /dev/null
@@ -1,68 +0,0 @@
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 0.0001 -c 1']
-{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999945, "MATRIX_DENSITY": 9.99945e-05, "TIME_S": 2.401212692260742}
-
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 18, ..., 999923, 999933, - 999945]), - col_indices=tensor([ 2985, 7299, 36484, ..., 77100, 85631, 92891]), - values=tensor([ 0.2415, 0.2506, -1.0512, ..., 0.5862, -1.2492, - -0.0903]), size=(100000, 100000), nnz=999945, - layout=torch.sparse_csr) -tensor([0.5691, 0.2840, 0.2992, ..., 0.3981, 0.5874, 0.9189]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999945 -Density: 9.99945e-05 -Time: 2.401212692260742 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4372 -ss 100000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999958, "MATRIX_DENSITY": 9.99958e-05, "TIME_S": 10.330792903900146} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 15, ..., 999940, 999947, - 999958]), - col_indices=tensor([ 6364, 15058, 52155, ..., 41882, 75278, 93727]), - values=tensor([ 1.1379, 2.3847, 1.1576, ..., 0.9163, 0.7641, - -1.0168]), size=(100000, 100000), nnz=999958, - layout=torch.sparse_csr) -tensor([0.2064, 0.7933, 0.3767, ..., 0.4884, 0.5023, 0.3792]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999958 -Density: 9.99958e-05 -Time: 10.330792903900146 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 15, ..., 999940, 999947, - 999958]), - col_indices=tensor([ 6364, 15058, 52155, ..., 41882, 75278, 93727]), - values=tensor([ 1.1379, 2.3847, 1.1576, ..., 0.9163, 0.7641, - -1.0168]), size=(100000, 100000), nnz=999958, - layout=torch.sparse_csr) -tensor([0.2064, 0.7933, 0.3767, ..., 0.4884, 0.5023, 0.3792]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999958 -Density: 9.99958e-05 -Time: 10.330792903900146 seconds - -[20.52, 20.36, 20.28, 20.32, 20.32, 20.28, 20.2, 20.44, 20.4, 20.44] -[20.44, 20.28, 20.8, 21.68, 24.08, 26.36, 29.2, 30.64, 32.6, 32.12, 32.12, 32.44, 32.6, 32.44] -10.632755994796753 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 999958, 'MATRIX_DENSITY': 9.99958e-05, 'TIME_S': 10.330792903900146, 'TIME_S_1KI': 2.362944397049439, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 381.8866044712067, 'W': 35.91605080170059} -[20.52, 20.36, 20.28, 20.32, 20.32, 20.28, 20.2, 20.44, 20.4, 20.44, 20.28, 20.24, 19.96, 19.96, 19.96, 20.0, 20.48, 20.92, 20.88, 20.68] -365.96 -18.298 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 999958, 'MATRIX_DENSITY': 9.99958e-05, 'TIME_S': 10.330792903900146, 'TIME_S_1KI': 2.362944397049439, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 381.8866044712067, 'W': 35.91605080170059, 'J_1KI': 87.34826268783318, 'W_1KI': 8.215016194350547, 'W_D': 17.618050801700594, 'J_D': 187.32843527841572, 'W_D_1KI': 4.029746294990987, 'J_D_1KI': 0.9217169018735102} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_1e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_100000_1e-05.json deleted file mode 100644 index d37f2c8..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 13545, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 99998, "MATRIX_DENSITY": 9.9998e-06, "TIME_S": 10.47447156906128, "TIME_S_1KI": 0.7733090859402938, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 345.2238781929017, "W": 32.996623304417554, "J_1KI": 25.487181852558265, "W_1KI": 2.4360740719392804, "W_D": 14.60462330441755, "J_D": 152.79941375160223, "W_D_1KI": 1.078229848978778, "J_D_1KI": 0.07960353259348675} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_1e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_100000_1e-05.output deleted file mode 100644 index 4ed2e76..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_1e-05.output +++ /dev/null @@ -1,66 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, 
"MATRIX_DENSITY": 1e-05, "TIME_S": 0.7751424312591553} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 99996, 99999, - 100000]), - col_indices=tensor([44500, 49971, 56483, ..., 66134, 68074, 1637]), - values=tensor([ 1.5203, 1.7392, 0.4724, ..., 0.1484, -0.5457, - 0.0441]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.4796, 0.4726, 0.4035, ..., 0.0030, 0.1184, 0.1782]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 0.7751424312591553 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 13545 -ss 100000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 99998, "MATRIX_DENSITY": 9.9998e-06, "TIME_S": 10.47447156906128} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 99996, 99997, 99998]), - col_indices=tensor([91921, 45205, 73439, ..., 30117, 55221, 9400]), - values=tensor([ 1.2826, -0.8828, -0.6837, ..., -2.0824, -1.6052, - 1.5294]), size=(100000, 100000), nnz=99998, - layout=torch.sparse_csr) -tensor([0.0960, 0.3139, 0.1449, ..., 0.1558, 0.0708, 0.3546]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 99998 -Density: 9.9998e-06 -Time: 10.47447156906128 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 99996, 99997, 99998]), - col_indices=tensor([91921, 45205, 73439, ..., 30117, 55221, 9400]), - values=tensor([ 1.2826, -0.8828, -0.6837, ..., -2.0824, -1.6052, - 1.5294]), size=(100000, 100000), nnz=99998, - layout=torch.sparse_csr) -tensor([0.0960, 0.3139, 0.1449, ..., 0.1558, 0.0708, 0.3546]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 99998 -Density: 9.9998e-06 -Time: 10.47447156906128 seconds - -[20.76, 20.72, 20.56, 20.52, 20.12, 19.92, 19.92, 20.0, 20.12, 19.92] -[20.24, 20.32, 20.68, 22.96, 25.36, 27.6, 30.2, 31.48, 31.68, 31.76, 31.64, 31.32, 31.28] -10.462400197982788 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 13545, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 99998, 'MATRIX_DENSITY': 9.9998e-06, 'TIME_S': 10.47447156906128, 'TIME_S_1KI': 0.7733090859402938, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.2238781929017, 'W': 32.996623304417554} -[20.76, 20.72, 20.56, 20.52, 20.12, 19.92, 19.92, 20.0, 20.12, 19.92, 20.4, 20.52, 20.52, 20.84, 21.04, 20.76, 20.72, 20.56, 20.28, 20.36] -367.84000000000003 -18.392000000000003 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 13545, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 99998, 'MATRIX_DENSITY': 9.9998e-06, 'TIME_S': 10.47447156906128, 'TIME_S_1KI': 0.7733090859402938, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.2238781929017, 'W': 32.996623304417554, 'J_1KI': 25.487181852558265, 'W_1KI': 2.4360740719392804, 'W_D': 14.60462330441755, 'J_D': 152.79941375160223, 'W_D_1KI': 1.078229848978778, 'J_D_1KI': 0.07960353259348675} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_2e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_100000_2e-05.json deleted file mode 100644 index 0a8dce4..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10495, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 200000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.752148389816284, "TIME_S_1KI": 1.0245019904541481, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 398.74920181274416, "W": 36.55315226044744, "J_1KI": 37.9942069378508, "W_1KI": 3.482911125340394, "W_D": 18.25915226044744, "J_D": 199.18452826595308, "W_D_1KI": 1.739795355926388, "J_D_1KI": 0.16577373567664488} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_2e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_100000_2e-05.output deleted file mode 100644 index b0c43b1..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_2e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199996, 
"MATRIX_DENSITY": 1.99996e-05, "TIME_S": 1.0004651546478271} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 199988, 199992, - 199996]), - col_indices=tensor([39669, 76958, 86447, ..., 67341, 83508, 90452]), - values=tensor([-1.3977, 1.0356, -0.5900, ..., 0.8207, -1.1645, - 0.3989]), size=(100000, 100000), nnz=199996, - layout=torch.sparse_csr) -tensor([0.9430, 0.8048, 0.8924, ..., 0.6826, 0.2927, 0.5723]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199996 -Density: 1.99996e-05 -Time: 1.0004651546478271 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10495 -ss 100000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 200000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.752148389816284} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 199995, 199997, - 200000]), - col_indices=tensor([33783, 37586, 62221, ..., 65115, 69602, 99771]), - values=tensor([ 0.3562, 0.3055, 0.5875, ..., -0.0843, 2.8119, - 0.1610]), size=(100000, 100000), nnz=200000, - layout=torch.sparse_csr) -tensor([0.8561, 0.5376, 0.4377, ..., 0.1840, 0.7093, 0.8920]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 200000 -Density: 2e-05 -Time: 10.752148389816284 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 199995, 199997, - 200000]), - col_indices=tensor([33783, 37586, 62221, ..., 65115, 69602, 99771]), - values=tensor([ 0.3562, 0.3055, 0.5875, ..., -0.0843, 2.8119, - 0.1610]), size=(100000, 100000), nnz=200000, - layout=torch.sparse_csr) -tensor([0.8561, 0.5376, 0.4377, ..., 0.1840, 0.7093, 0.8920]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 200000 -Density: 2e-05 -Time: 10.752148389816284 seconds - -[20.0, 20.12, 20.0, 19.88, 20.2, 20.36, 20.6, 20.76, 20.64, 20.44] -[20.4, 20.48, 23.72, 25.72, 25.72, 27.88, 30.08, 32.52, 30.44, 31.72, 31.72, 32.04, 31.88, 31.68] -10.908750057220459 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10495, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 200000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.752148389816284, 'TIME_S_1KI': 1.0245019904541481, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 398.74920181274416, 'W': 36.55315226044744} -[20.0, 20.12, 20.0, 19.88, 20.2, 20.36, 20.6, 20.76, 20.64, 20.44, 21.0, 20.64, 20.4, 20.16, 20.2, 20.2, 20.2, 20.32, 20.32, 20.32] -365.88 -18.294 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10495, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 200000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.752148389816284, 'TIME_S_1KI': 1.0245019904541481, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 398.74920181274416, 'W': 36.55315226044744, 'J_1KI': 37.9942069378508, 'W_1KI': 3.482911125340394, 'W_D': 18.25915226044744, 'J_D': 199.18452826595308, 'W_D_1KI': 1.739795355926388, 'J_D_1KI': 0.16577373567664488} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_5e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_100000_5e-05.json deleted file mode 100644 index 5b242d1..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 6654, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499994, "MATRIX_DENSITY": 4.99994e-05, "TIME_S": 10.683140993118286, "TIME_S_1KI": 1.6055216400839023, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 366.1741095733642, "W": 37.080439049513444, "J_1KI": 55.03067471796877, "W_1KI": 5.572653899836706, "W_D": 18.798439049513444, "J_D": 185.63700583839412, "W_D_1KI": 2.825133611288465, "J_D_1KI": 0.4245767374945093} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_5e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_100000_5e-05.output deleted file mode 100644 index a12622c..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_5e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499988, "MATRIX_DENSITY": 4.99988e-05, 
"TIME_S": 1.5778212547302246} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 10, ..., 499983, 499985, - 499988]), - col_indices=tensor([ 1371, 28809, 70668, ..., 42030, 54936, 56770]), - values=tensor([ 0.4333, 0.4225, 0.5901, ..., -0.2567, 0.8071, - -0.4001]), size=(100000, 100000), nnz=499988, - layout=torch.sparse_csr) -tensor([0.8725, 0.4955, 0.3045, ..., 0.0592, 0.4078, 0.2144]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499988 -Density: 4.99988e-05 -Time: 1.5778212547302246 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 6654 -ss 100000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499994, "MATRIX_DENSITY": 4.99994e-05, "TIME_S": 10.683140993118286} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 10, ..., 499983, 499991, - 499994]), - col_indices=tensor([11812, 14754, 24587, ..., 4989, 19562, 26481]), - values=tensor([-0.5266, -0.2099, 0.6678, ..., -1.2539, -0.8739, - -0.3506]), size=(100000, 100000), nnz=499994, - layout=torch.sparse_csr) -tensor([0.7326, 0.9445, 0.7161, ..., 0.6054, 0.1400, 0.0492]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499994 -Density: 4.99994e-05 -Time: 10.683140993118286 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 10, ..., 499983, 499991, - 499994]), - col_indices=tensor([11812, 14754, 24587, ..., 4989, 19562, 26481]), - values=tensor([-0.5266, -0.2099, 0.6678, ..., -1.2539, -0.8739, - -0.3506]), size=(100000, 100000), nnz=499994, - layout=torch.sparse_csr) -tensor([0.7326, 0.9445, 0.7161, ..., 0.6054, 0.1400, 0.0492]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499994 -Density: 4.99994e-05 -Time: 10.683140993118286 seconds - -[20.12, 20.4, 20.36, 20.4, 20.52, 20.28, 20.08, 20.16, 20.52, 20.56] -[20.64, 20.68, 20.68, 23.88, 25.56, 28.76, 31.2, 33.8, 31.48, 32.0, 31.92, 32.0, 31.92] -9.875128746032715 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 6654, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 499994, 'MATRIX_DENSITY': 4.99994e-05, 'TIME_S': 10.683140993118286, 'TIME_S_1KI': 1.6055216400839023, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 366.1741095733642, 'W': 37.080439049513444} -[20.12, 20.4, 20.36, 20.4, 20.52, 20.28, 20.08, 20.16, 20.52, 20.56, 20.16, 20.12, 20.08, 20.08, 20.36, 20.4, 20.56, 20.56, 20.28, 20.12] -365.64 -18.282 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 6654, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 499994, 'MATRIX_DENSITY': 4.99994e-05, 'TIME_S': 10.683140993118286, 'TIME_S_1KI': 1.6055216400839023, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 366.1741095733642, 'W': 37.080439049513444, 'J_1KI': 55.03067471796877, 'W_1KI': 5.572653899836706, 'W_D': 18.798439049513444, 'J_D': 185.63700583839412, 'W_D_1KI': 2.825133611288465, 'J_D_1KI': 0.4245767374945093} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_8e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_100000_8e-05.json deleted file mode 100644 index 237a0ac..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 5005, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799969, "MATRIX_DENSITY": 7.99969e-05, "TIME_S": 10.528297424316406, "TIME_S_1KI": 2.103555928934347, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 353.08805488586427, "W": 33.32030349996928, "J_1KI": 70.5470639132596, "W_1KI": 6.657403296697158, "W_D": 14.945303499969278, "J_D": 158.3721511566639, "W_D_1KI": 2.986074625368487, "J_D_1KI": 0.5966183067669305} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_100000_8e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_100000_8e-05.output deleted file mode 100644 index 21f7e14..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_100000_8e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799967, 
"MATRIX_DENSITY": 7.99967e-05, "TIME_S": 2.0975828170776367} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 13, ..., 799955, 799960, - 799967]), - col_indices=tensor([ 6629, 8372, 29001, ..., 55409, 75475, 87705]), - values=tensor([ 0.3983, 0.9368, 0.8306, ..., -2.2845, 0.6609, - -0.9219]), size=(100000, 100000), nnz=799967, - layout=torch.sparse_csr) -tensor([0.4006, 0.2457, 0.6854, ..., 0.9449, 0.7766, 0.5729]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799967 -Density: 7.99967e-05 -Time: 2.0975828170776367 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 5005 -ss 100000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799969, "MATRIX_DENSITY": 7.99969e-05, "TIME_S": 10.528297424316406} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 14, ..., 799947, 799960, - 799969]), - col_indices=tensor([13025, 14589, 27413, ..., 85258, 89285, 92694]), - values=tensor([-0.0092, 0.8106, 0.5188, ..., -1.1562, 0.5281, - 0.2289]), size=(100000, 100000), nnz=799969, - layout=torch.sparse_csr) -tensor([0.8836, 0.3169, 0.9227, ..., 0.6017, 0.3480, 0.8748]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799969 -Density: 7.99969e-05 -Time: 10.528297424316406 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 14, ..., 799947, 799960, - 799969]), - col_indices=tensor([13025, 14589, 27413, ..., 85258, 89285, 92694]), - values=tensor([-0.0092, 0.8106, 0.5188, ..., -1.1562, 0.5281, - 0.2289]), size=(100000, 100000), nnz=799969, - layout=torch.sparse_csr) -tensor([0.8836, 0.3169, 0.9227, ..., 0.6017, 0.3480, 0.8748]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799969 -Density: 7.99969e-05 -Time: 10.528297424316406 seconds - -[20.08, 20.04, 19.96, 20.04, 20.04, 20.24, 20.48, 20.56, 20.48, 20.36] -[20.0, 20.12, 20.84, 22.48, 25.32, 27.88, 30.36, 31.4, 32.84, 32.12, 32.12, 32.16, 32.32] -10.596783876419067 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 5005, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 799969, 'MATRIX_DENSITY': 7.99969e-05, 'TIME_S': 10.528297424316406, 'TIME_S_1KI': 2.103555928934347, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.08805488586427, 'W': 33.32030349996928} -[20.08, 20.04, 19.96, 20.04, 20.04, 20.24, 20.48, 20.56, 20.48, 20.36, 20.48, 20.6, 20.52, 20.28, 20.28, 20.44, 20.68, 20.96, 20.92, 21.04] -367.5 -18.375 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 5005, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 799969, 'MATRIX_DENSITY': 7.99969e-05, 'TIME_S': 10.528297424316406, 'TIME_S_1KI': 2.103555928934347, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.08805488586427, 'W': 33.32030349996928, 'J_1KI': 70.5470639132596, 'W_1KI': 6.657403296697158, 'W_D': 14.945303499969278, 'J_D': 158.3721511566639, 'W_D_1KI': 2.986074625368487, 'J_D_1KI': 0.5966183067669305} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_0.0001.json b/pytorch/output_1core_after_test/altra_10_10_10_10000_0.0001.json deleted file mode 100644 index e87cd21..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 176497, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.582123756408691, "TIME_S_1KI": 0.05995639447927552, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 323.9779005908966, "W": 29.969251252049236, "J_1KI": 1.83560004187548, "W_1KI": 0.16980034364351368, "W_D": 11.822251252049234, "J_D": 127.80259702467916, "W_D_1KI": 0.06698273201272109, "J_D_1KI": 0.0003795120144405916} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_0.0001.output b/pytorch/output_1core_after_test/altra_10_10_10_10000_0.0001.output deleted file mode 100644 index f59d798..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_0.0001.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 
9.999e-05, "TIME_S": 0.06440186500549316} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 9999, 9999]), - col_indices=tensor([5106, 3897, 3155, ..., 1583, 3431, 5555]), - values=tensor([ 1.3508, -1.1736, 0.4296, ..., -0.8458, 0.0925, - 0.1832]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.5935, 0.4331, 0.3309, ..., 0.4577, 0.4204, 0.6600]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 0.06440186500549316 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 163038 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 9.699290990829468} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9998, 9998, 9999]), - col_indices=tensor([ 552, 5534, 9404, ..., 8672, 9099, 1672]), - values=tensor([-0.4570, 0.0714, -1.0309, ..., 0.9768, -0.9088, - 0.5389]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.9350, 0.7973, 0.4526, ..., 0.7485, 0.8481, 0.0598]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 9.699290990829468 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 176497 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.582123756408691} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 9996, 9997, 10000]), - col_indices=tensor([7714, 1870, 5845, ..., 759, 3572, 7308]), - values=tensor([-1.0266, 0.5680, -0.8233, ..., -0.9435, -0.5643, - 1.5314]), size=(10000, 10000), nnz=10000, - layout=torch.sparse_csr) -tensor([0.6158, 0.7644, 0.3713, ..., 0.4226, 0.3057, 0.7915]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 10000 -Density: 0.0001 -Time: 10.582123756408691 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 9996, 9997, 10000]), - col_indices=tensor([7714, 1870, 5845, ..., 759, 3572, 7308]), - values=tensor([-1.0266, 0.5680, -0.8233, ..., -0.9435, -0.5643, - 1.5314]), size=(10000, 10000), nnz=10000, - layout=torch.sparse_csr) -tensor([0.6158, 0.7644, 0.3713, ..., 0.4226, 0.3057, 0.7915]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 10000 -Density: 0.0001 -Time: 10.582123756408691 seconds - -[20.32, 20.24, 20.16, 20.2, 19.92, 19.96, 20.48, 20.36, 20.36, 20.4] -[20.48, 20.52, 20.28, 24.08, 24.84, 26.56, 26.92, 24.56, 24.12, 22.96, 23.08, 22.88, 22.8, 23.0] -10.810343503952026 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 176497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.582123756408691, 'TIME_S_1KI': 0.05995639447927552, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.9779005908966, 'W': 29.969251252049236} -[20.32, 20.24, 20.16, 20.2, 19.92, 19.96, 20.48, 20.36, 20.36, 20.4, 20.24, 20.16, 19.92, 19.76, 19.76, 20.0, 20.12, 20.48, 20.44, 20.28] -362.94000000000005 -18.147000000000002 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 176497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.582123756408691, 'TIME_S_1KI': 0.05995639447927552, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.9779005908966, 'W': 29.969251252049236, 'J_1KI': 1.83560004187548, 'W_1KI': 0.16980034364351368, 'W_D': 11.822251252049234, 'J_D': 127.80259702467916, 'W_D_1KI': 0.06698273201272109, 'J_D_1KI': 0.0003795120144405916} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_1e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_10000_1e-05.json deleted file mode 100644 index 8316e6e..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 424922, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.511717319488525, "TIME_S_1KI": 0.024737992665685764, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 302.01861512184144, "W": 
28.37936763657448, "J_1KI": 0.7107624814009194, "W_1KI": 0.0667872400971813, "W_D": 10.020367636574477, "J_D": 106.6386536643505, "W_D_1KI": 0.023581663544308077, "J_D_1KI": 5.549645239434079e-05} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_1e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_10000_1e-05.output deleted file mode 100644 index c5827be..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_1e-05.output +++ /dev/null @@ -1,1521 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.027333974838256836} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7460, 95, 2149, 8123, 5727, 819, 2541, 6301, 542, - 8800, 3837, 8608, 8238, 8243, 6812, 3238, 9738, 7528, - 4343, 6604, 8963, 4548, 6434, 2490, 188, 3968, 6877, - 2232, 9703, 2759, 6563, 4697, 3603, 3015, 6966, 1179, - 365, 9728, 5008, 4613, 8729, 3571, 866, 3008, 8598, - 9902, 7373, 3547, 5108, 5057, 9100, 5450, 3291, 2188, - 9416, 3562, 5603, 6127, 9453, 5857, 507, 3032, 8589, - 4412, 6297, 709, 3280, 7018, 7966, 6103, 6818, 3382, - 923, 5941, 6465, 2968, 5119, 3767, 5339, 3738, 1075, - 8251, 1732, 3419, 340, 6259, 8647, 2190, 8333, 6170, - 577, 2449, 46, 8548, 3871, 3935, 6500, 1370, 4398, - 5430, 277, 9214, 6681, 6806, 7402, 647, 2796, 4494, - 763, 9260, 215, 348, 3127, 3327, 314, 1604, 8141, - 8072, 6415, 2241, 3077, 5903, 5315, 8232, 3551, 1285, - 9560, 80, 5883, 5271, 8441, 9225, 5372, 7470, 2869, - 8925, 3295, 3700, 8263, 4992, 2104, 8042, 5926, 8135, - 9134, 159, 4338, 8251, 2524, 6155, 2752, 4018, 7796, - 8152, 4601, 1150, 3514, 464, 3035, 4598, 38, 1082, - 3705, 9589, 5667, 3683, 5157, 7419, 7422, 8380, 4255, - 9254, 9942, 5160, 8623, 3467, 7102, 5470, 3244, 2675, - 4247, 8272, 1714, 8570, 4816, 84, 5545, 3498, 4467, - 327, 5836, 7183, 3768, 2069, 2738, 726, 1005, 1771, - 8491, 3814, 7133, 9113, 8216, 223, 4927, 124, 9899, - 2496, 7603, 8078, 9139, 9587, 7954, 3177, 7716, 6375, - 8705, 5626, 3540, 6616, 4988, 5453, 6154, 1243, 7642, - 2973, 1685, 4556, 7012, 3985, 905, 3455, 6098, 777, - 2470, 7239, 6313, 7870, 6716, 5111, 2961, 8484, 3879, - 8499, 7577, 8455, 2925, 7566, 8026, 3896, 4675, 1117, - 401, 8933, 2726, 9410, 5344, 4470, 1107, 2083, 3196, - 4849, 7953, 1175, 6661, 1281, 9620, 8256, 4397, 358, - 7084, 8258, 8688, 1005, 4820, 9392, 40, 8001, 4013, - 6165, 2522, 3576, 9639, 6977, 2906, 2414, 9216, 3927, - 6914, 9953, 8350, 9080, 4606, 1097, 6767, 81, 5112, - 6490, 3025, 7966, 4254, 5645, 1737, 6783, 7352, 8875, - 9139, 6057, 1172, 72, 7555, 9529, 1625, 6396, 7301, - 2072, 4505, 352, 8880, 4225, 6919, 2318, 3360, 5986, - 8500, 792, 7149, 2099, 6483, 7312, 5696, 3834, 6511, - 8107, 5247, 5157, 243, 7220, 3402, 2366, 9269, 3668, - 3206, 525, 
7265, 7008, 1981, 2493, 7225, 9039, 3245, - 3749, 4259, 997, 2026, 8722, 4354, 6307, 1104, 6983, - 8417, 6975, 1137, 1279, 6185, 1354, 395, 8392, 3443, - 3567, 5324, 5648, 8193, 2387, 537, 5966, 927, 3538, - 4335, 9129, 7140, 280, 9663, 3382, 6623, 8188, 5811, - 9585, 1527, 6234, 9177, 9105, 2004, 7009, 2076, 5700, - 3226, 1380, 62, 5574, 4426, 8914, 4811, 1957, 5659, - 7279, 2285, 9569, 1907, 6475, 9584, 7803, 4196, 6347, - 5883, 7460, 2626, 4611, 1026, 4660, 2307, 6175, 3660, - 4370, 9782, 2347, 6752, 2814, 3076, 4729, 5255, 3578, - 933, 8855, 7013, 9544, 561, 9169, 9511, 6391, 3137, - 1914, 3702, 5439, 7628, 8176, 5635, 2966, 6377, 4171, - 3481, 2712, 1913, 5872, 2475, 917, 7191, 7338, 2285, - 3028, 5562, 4278, 5052, 7893, 359, 85, 3949, 7764, - 2913, 5096, 162, 7934, 5196, 5151, 4608, 1072, 2253, - 3680, 8109, 2333, 1154, 9156, 9269, 3244, 5804, 1209, - 574, 3230, 789, 6601, 1166, 7623, 5211, 1530, 1207, - 9411, 5176, 9804, 6367, 9213, 615, 3513, 2195, 1296, - 2074, 4504, 5853, 9277, 7787, 8912, 9170, 97, 4748, - 6775, 9405, 1928, 2201, 6023, 6459, 779, 7753, 5129, - 2598, 8457, 5307, 6894, 4655, 1667, 2588, 5648, 8611, - 8355, 9822, 3172, 7976, 9429, 4978, 7853, 6934, 182, - 5902, 6151, 4843, 6579, 7345, 8730, 57, 8196, 2401, - 2203, 7123, 3017, 5849, 5026, 2449, 4187, 8651, 5428, - 7443, 9287, 1497, 3618, 2858, 6331, 9853, 6625, 1776, - 9567, 8400, 2412, 3604, 7304, 2697, 7290, 3833, 514, - 5490, 2484, 1752, 9512, 6056, 2319, 7173, 9514, 9527, - 1665, 3786, 6014, 8683, 7423, 7427, 7147, 1033, 3549, - 4047, 9628, 1948, 7138, 9982, 2133, 4176, 4098, 9929, - 8209, 7241, 1772, 2975, 8790, 3921, 7625, 9721, 8654, - 295, 5603, 9799, 1966, 1900, 440, 3156, 3441, 4905, - 1921, 7346, 3640, 4370, 1764, 9265, 3994, 6616, 9461, - 1785, 5131, 474, 712, 6918, 3991, 8696, 2289, 8025, - 6258, 7654, 272, 7384, 1547, 6246, 9524, 7750, 8815, - 1291, 8928, 197, 7387, 474, 7406, 7099, 1897, 9553, - 7409, 9959, 9150, 3445, 4696, 9110, 6618, 6895, 2481, - 8052, 9333, 553, 6478, 8061, 1253, 3357, 7961, 7368, - 2899, 3657, 326, 7700, 5120, 9411, 1907, 1746, 5361, - 2806, 8980, 1357, 9556, 5658, 3176, 3112, 2455, 3776, - 6093, 394, 2239, 1471, 3689, 6730, 9976, 6174, 3235, - 1094, 5944, 7076, 6448, 5949, 6773, 7093, 6106, 4689, - 180, 8683, 3137, 7136, 2650, 5728, 5316, 5615, 5371, - 3169, 7054, 5314, 7225, 1523, 2854, 3651, 4351, 8414, - 8908, 9116, 3581, 62, 9352, 8481, 4364, 3631, 3533, - 9481, 7880, 7846, 7062, 6084, 122, 4314, 7433, 5176, - 6628, 619, 2367, 3794, 8519, 9752, 1169, 9340, 3260, - 184, 57, 945, 8839, 6495, 4422, 5717, 1439, 1711, - 9197, 7806, 6983, 8707, 7697, 2320, 9549, 6050, 4255, - 1508, 9413, 2730, 574, 712, 843, 9096, 7491, 3640, - 2075, 8251, 6406, 5094, 5188, 8580, 963, 3048, 7552, - 7743, 2587, 8955, 152, 3283, 6200, 3782, 7118, 3056, - 6641, 1384, 8696, 143, 2096, 7912, 4911, 9029, 6679, - 1372, 2855, 1243, 3630, 4447, 4396, 8065, 8498, 6811, - 7427, 576, 9682, 231, 7889, 5387, 9687, 6406, 3832, - 7322, 5014, 2123, 8246, 9612, 3974, 8028, 6761, 1143, - 1530, 2705, 9066, 707, 8998, 5656, 838, 5310, 7627, - 9650, 4803, 7934, 8361, 7088, 700, 818, 3511, 7721, - 2657, 8558, 7143, 5301, 1028, 6280, 1851, 7774, 8796, - 5384, 1246, 9096, 8952, 9332, 187, 1464, 8131, 3237, - 1984, 2905, 7422, 7692, 2271, 8460, 1604, 4216, 3232, - 5437, 5452, 359, 4380, 9446, 7602, 2980, 727, 7139, - 8473, 1986, 2573, 1577, 4057, 7763, 5405, 2384, 1132, - 7765, 4284, 3312, 3038, 5020, 2209, 8952, 6591, 4691, - 2626, 3371, 3440, 8230, 3902, 7229, 6754, 2979, 8105, - 9305, 3029, 818, 3649, 3328, 2199, 4506, 
9369, 6373, - 2132, 376, 7329, 603, 4654, 6545, 7624, 7765, 4120, - 9223, 8969, 4668, 7246, 4439, 1612, 2731, 2081, 2398, - 1400, 8208, 2430, 8974, 5879, 4015, 3585, 4595, 1024, - 5619, 4161, 7849, 8154, 4129, 6942, 1045, 1977, 957, - 5175, 3150, 3180, 769, 7266, 8917, 2919, 3022, 7984, - 1066, 9397, 5456, 3715, 9084, 4389, 2178, 309, 1344, - 3394, 7176, 7512, 8425, 4128, 7359, 8381, 33, 3409, - 2822, 3571, 7200, 6183, 6427, 2299, 1821, 8364, 4280, - 541]), - values=tensor([-5.3331e-01, 2.3451e+00, -1.6284e+00, -3.1640e-01, - 5.3372e-01, 2.2538e+00, -2.3719e-01, -1.5649e-01, - -2.5401e+00, 3.8243e-01, -7.6606e-01, 1.5824e-01, - 1.2310e-01, -1.4896e-01, -3.6411e-01, 1.2615e+00, - 2.1487e+00, -2.5150e-01, 2.8277e-01, -1.2667e-01, - -6.4913e-01, 4.5276e-01, -5.3699e-02, 2.5517e-01, - 8.5620e-01, -1.4282e+00, -5.3483e-01, -4.1514e-01, - 8.2779e-01, 1.8094e+00, 7.2043e-01, -1.5692e+00, - 1.1543e+00, -1.5814e+00, 1.4533e+00, 8.4874e-01, - -1.8631e-01, 4.3557e-01, -4.3686e-02, -1.1970e+00, - -5.7082e-01, -9.4541e-01, 7.2159e-01, -1.6075e+00, - -4.6710e-01, -2.6212e-01, 1.6083e-01, -1.2108e+00, - -1.2062e+00, 1.9788e+00, -1.1959e+00, 1.9314e-01, - -1.0622e+00, -1.1161e+00, -2.3775e-01, -3.9181e-01, - 2.1708e+00, 7.9137e-01, 1.2551e+00, 2.1311e-01, - 2.5906e-01, -8.8102e-01, -2.1792e-01, 2.3150e-01, - 4.8055e-01, -3.5720e-01, 6.3307e-01, 1.6235e+00, - -2.5934e-02, 1.1347e+00, -1.5379e+00, -2.5918e-01, - -5.2667e-01, 1.2040e+00, 5.7701e-01, 2.8183e-01, - 3.4629e-02, -7.4945e-01, 1.7381e-01, -1.2043e+00, - -9.2281e-01, -1.1522e-01, 3.1995e-01, -2.1887e+00, - -9.5904e-01, -6.9718e-01, -1.0509e+00, 1.0166e-01, - -8.6962e-06, 7.7003e-02, -3.0839e-01, 6.1279e-01, - -5.1355e-01, -2.0582e+00, 1.2470e-01, -1.0140e+00, - -1.8082e-02, 6.7185e-02, 3.8362e-01, -7.0600e-01, - -7.3676e-01, 1.4422e+00, -1.3022e-01, 1.0821e+00, - 7.5284e-01, 1.3830e-01, -1.6148e+00, -1.2164e+00, - 1.3092e+00, -1.3827e+00, 2.6066e-01, -9.7765e-01, - -6.5429e-01, 7.7549e-01, -1.0592e+00, 1.3102e-01, - 1.9249e+00, -1.8753e-01, 2.4872e+00, -1.8568e-01, - -8.3977e-01, -9.8972e-01, -1.8838e+00, 8.0424e-02, - 1.2335e+00, -5.0714e-01, -6.6841e-01, 3.1051e-01, - 1.3083e+00, 2.5672e-01, 8.6518e-01, -1.4792e+00, - -2.2222e-01, 1.4114e+00, -2.2916e-01, -3.1784e-01, - -6.9726e-01, 1.1478e+00, 9.2987e-01, 1.5494e+00, - 1.4442e+00, 1.9851e+00, -1.8808e+00, 1.3685e-01, - 4.8185e-01, 8.3103e-01, -2.3277e-01, -7.9677e-01, - 8.5936e-01, 4.6349e-01, 3.0721e-01, -3.5891e-01, - -5.8659e-01, 4.9869e-01, -2.0636e+00, 4.0403e-01, - -1.1234e-01, 1.2901e+00, 4.6015e-01, -5.8841e-01, - -9.4308e-01, -1.1601e+00, -1.5377e-01, -8.5086e-01, - 2.1085e-01, 1.6078e-01, -1.1869e+00, 1.8905e-01, - 3.8879e-01, -8.5347e-01, 5.8599e-01, -3.2357e-01, - -7.0870e-01, -5.5627e-01, 9.5300e-02, 1.0885e+00, - -1.1468e-01, -1.0048e+00, -3.4468e-01, -6.3688e-01, - 1.2130e+00, -4.1603e-01, 2.7783e-01, -1.0551e+00, - -5.4470e-01, -2.9453e+00, -1.2627e+00, 1.4588e+00, - 3.2042e-01, -9.6737e-01, -1.3997e-01, 7.2959e-01, - 4.7752e-01, 7.2766e-01, -1.5237e+00, -1.3766e-01, - -1.0445e-01, 6.2036e-01, -9.4468e-01, 1.6889e-01, - 8.6980e-01, 3.2071e-01, -5.3421e-01, 8.5781e-01, - -9.8054e-01, 1.4352e+00, 5.7617e-01, -2.2036e-02, - -1.4434e-01, -8.4775e-01, 9.2057e-02, 8.1260e-01, - -1.9436e-01, -4.0359e-02, 4.2433e-01, -1.0471e+00, - 1.3929e+00, 2.4595e-01, 1.8927e+00, -1.4745e-01, - 4.7827e-01, -7.4241e-01, -2.1983e-01, 1.7816e+00, - 1.0828e+00, -9.0610e-01, 2.1061e+00, 1.5945e-01, - -1.3553e+00, 8.6011e-01, 2.1179e+00, -1.5751e-01, - -6.4053e-01, -1.1838e+00, -5.0648e-01, -1.2818e+00, - 
-7.8472e-01, 3.8967e-01, -2.4764e+00, 7.8290e-01, - -3.3933e-01, -1.4595e+00, 5.6590e-01, 5.2603e-02, - 2.8544e-01, 1.5744e+00, 1.3238e-01, 1.1685e+00, - 1.3537e+00, -9.9910e-02, -2.2707e-01, -1.6699e+00, - 1.8652e-01, -2.4180e-01, 7.0737e-01, -2.3261e-01, - 1.8604e-01, 4.1439e-01, -2.0398e-01, 1.0126e+00, - 1.1169e+00, -2.3771e+00, 6.9991e-01, -1.3085e-01, - -1.3131e+00, 5.5179e-01, -7.3064e-01, -8.5983e-01, - 1.4696e+00, 2.6813e-01, 1.7451e+00, -8.8199e-01, - -1.2510e+00, 1.5225e-01, -5.3710e-01, 2.6994e-01, - 2.1338e+00, 6.2676e-01, 1.0742e-01, 8.0533e-01, - -4.9632e-01, -3.1013e-01, 2.2449e+00, 1.5858e-01, - -1.1269e+00, 8.7236e-01, -1.1013e+00, -9.6488e-01, - 1.2228e+00, -6.7298e-01, -2.5933e-02, -2.6265e-01, - 7.9626e-01, -1.7005e-02, -2.6098e-01, -2.8154e-02, - -2.2069e-02, 8.9367e-02, 6.2559e-01, -5.7008e-01, - -1.9397e+00, -5.4029e-01, 4.8287e-01, -5.6209e-01, - 7.3547e-01, -5.4617e-01, 2.8210e-01, -7.0766e-01, - 1.8383e-01, -9.0840e-01, -3.9777e-02, -3.6273e-01, - 1.0501e+00, 4.4070e-01, -4.9748e-01, 8.8185e-01, - 3.2756e-01, -4.4458e-01, 2.1247e+00, -8.3443e-01, - -2.3744e-01, -7.3919e-01, -3.9113e-02, 2.4999e+00, - 1.2999e+00, 9.9155e-01, -2.4968e-01, 8.8838e-01, - -1.6692e+00, -1.8457e+00, -4.3595e-02, 1.0699e+00, - 3.0942e-01, 1.8413e-01, 2.0374e-01, 4.3677e-01, - -1.9167e+00, -7.1475e-01, 1.9173e+00, -1.3306e+00, - 7.9012e-01, 4.5154e-01, -8.9430e-01, 8.6898e-01, - -1.2595e+00, 1.9501e-01, 4.0063e-01, 2.9254e-01, - -1.7617e+00, 7.5678e-01, 2.4917e-01, -2.6033e-01, - -3.7532e-01, 3.3605e-01, 1.8478e+00, 4.6781e-02, - 1.9717e-01, -1.6361e-02, -7.5857e-02, 2.5777e-01, - 1.6117e+00, -3.7284e-01, 1.2489e-01, 1.3748e+00, - -1.2515e+00, -2.2354e+00, 1.4672e+00, -1.0873e+00, - -1.2652e+00, -1.0205e+00, 2.5671e-01, -3.7857e-01, - 8.0711e-01, -6.6585e-01, 8.3096e-01, -1.1639e+00, - 3.4684e-01, -1.0031e+00, 2.3046e+00, 1.5809e+00, - 7.8250e-01, 1.9413e+00, 8.9364e-01, 7.3635e-02, - -2.1026e+00, 3.4616e-01, 6.7930e-01, 7.4723e-01, - 7.7091e-02, 2.4268e-02, -1.5468e-01, 7.8547e-02, - -1.6866e-01, 5.9988e-01, 1.1022e+00, 2.2209e+00, - 4.6648e-01, -7.1787e-01, -1.7547e-01, -3.0510e-01, - -2.6526e-01, 1.0487e+00, 5.6737e-02, 1.5142e+00, - -1.1124e+00, -5.9427e-02, -1.6733e-01, 2.2425e-01, - 5.1230e-01, -4.0193e-01, 9.1291e-01, -1.7639e-01, - -1.5274e-01, -7.7398e-01, -5.2076e-01, -2.7437e-01, - -3.4923e-01, 9.9831e-01, -1.0540e-01, 2.6939e-01, - 3.9001e-01, 2.6053e+00, -1.5809e+00, -3.9444e-01, - 4.6795e-01, -4.5480e-01, 1.0119e+00, 2.0462e-01, - -6.6118e-01, -1.0366e+00, -2.1140e+00, 4.3487e-01, - 8.2006e-02, 6.4153e-01, -3.7127e-01, 1.0995e+00, - -7.3887e-01, -2.0481e-01, -1.2248e+00, 2.1174e-01, - -1.7427e+00, -4.6175e-01, 5.1499e-01, -5.4329e-02, - 9.9287e-01, 7.6656e-01, -8.4018e-01, 1.9566e-01, - -6.3788e-01, 8.3737e-01, 1.7869e-01, 7.9313e-01, - -9.7006e-01, -2.2245e+00, -5.1321e-01, 7.7103e-01, - -2.1132e+00, 7.6338e-01, -8.4482e-01, 1.0383e+00, - -2.9405e-01, 6.2286e-01, 1.1865e+00, 1.4175e+00, - -3.3556e-01, 3.5866e-01, 1.0944e+00, 8.3543e-01, - -2.9355e-01, -1.0349e+00, 8.6896e-01, -8.5778e-01, - -1.4023e+00, -1.6941e+00, 1.8298e+00, -1.0397e+00, - -1.0776e-01, -1.7032e+00, 1.2085e+00, 4.5130e-01, - 3.7186e-01, 3.0848e-01, -1.0107e+00, 1.4865e+00, - 1.1784e+00, 1.5366e+00, 2.7037e-01, -1.3555e+00, - 3.4245e-01, -6.3937e-02, 8.5431e-02, 4.8132e-02, - 1.2096e+00, 1.7321e+00, 1.3011e+00, -1.9125e+00, - -1.3764e+00, 4.8777e-01, 3.7304e-01, 1.8153e+00, - -1.3383e-01, 1.4625e+00, -1.0118e-01, 1.3776e+00, - -1.2051e+00, 4.5659e-01, 1.3111e+00, 1.0928e+00, - 9.5569e-01, 2.2154e+00, 
-1.7755e+00, -1.9174e-02, - 7.5716e-01, -8.5222e-01, -1.1447e+00, 1.0051e+00, - 9.6723e-01, -2.5858e-01, -6.7134e-01, 2.4719e-02, - -9.7816e-01, 1.6972e-01, 5.6151e-01, -5.5239e-01, - -1.7012e-01, -7.0925e-01, -1.8490e-01, -1.0149e+00, - 3.5714e-01, -8.4613e-01, -1.6651e+00, -6.5346e-01, - 5.2607e-01, -7.5853e-01, 9.8087e-01, 1.1815e+00, - -1.0057e+00, -1.1212e+00, 8.5381e-02, -2.7196e-01, - -3.4060e-01, 9.2932e-01, -5.7194e-01, 1.3497e-02, - 7.4192e-01, 4.5774e-01, 5.1947e-01, 6.5392e-01, - -1.1017e+00, 7.8621e-01, -3.8076e-01, -3.1040e-01, - 2.9371e-01, -6.5477e-01, 4.9920e-02, -1.4767e+00, - -7.5721e-01, -7.2225e-01, 5.0178e-01, 7.7550e-02, - -1.5994e+00, 1.8357e+00, 3.9384e-01, -1.0692e+00, - 2.9433e-01, -8.7962e-01, -9.6374e-01, -1.7315e+00, - 5.2822e-01, 8.8931e-01, 1.5915e+00, -1.8946e-01, - 4.9971e-01, 5.7954e-01, -6.2297e-01, -3.8343e-01, - 5.1758e-01, -1.1726e+00, -1.6753e+00, -8.7202e-01, - 1.7440e+00, -1.0032e-01, -3.6468e-02, 1.9556e+00, - -6.4411e-01, -1.7748e+00, -1.4129e-01, 8.7225e-01, - -2.7381e-01, -1.6566e-01, -1.1686e+00, -6.2660e-01, - 2.0552e-01, -1.6977e-01, -6.2654e-01, -4.1019e-01, - 4.6872e-02, 3.8940e-01, 1.4597e+00, -1.8205e-01, - -1.4174e+00, -2.2931e-01, -9.4222e-01, -5.6002e-01, - 1.0201e+00, -4.9476e-01, -7.8036e-01, -7.1060e-01, - -7.9259e-01, -9.0497e-01, 5.8370e-01, 2.1296e+00, - -1.8183e+00, 1.3633e+00, 5.9850e-01, -4.1502e-01, - 4.5102e-01, -1.8075e-01, -9.1883e-01, 1.5143e+00, - 1.3781e+00, 5.8915e-01, 7.1418e-03, -4.9443e-01, - 5.7161e-01, 1.6533e+00, 8.3268e-01, -8.9076e-02, - -6.3134e-01, -4.7644e-01, 9.0796e-01, 5.3308e-01, - 5.7129e-01, -2.5642e-01, -4.0829e-01, -8.9458e-01, - -7.5342e-01, 1.5121e+00, 4.0812e-01, 3.9086e-01, - -2.1670e-01, 3.3457e-01, -9.0204e-01, 5.5052e-01, - -6.4444e-01, -2.2246e-01, -1.7430e-02, 7.4431e-01, - 2.7436e-01, 7.5106e-01, 1.0716e+00, -9.1284e-02, - 4.3333e-01, -2.2290e+00, -6.2429e-01, 1.4511e+00, - -1.0787e-01, -1.8373e-01, 2.8556e-01, -7.5001e-01, - 1.5220e+00, 1.4545e+00, 1.1588e+00, 3.1940e-01, - -5.8514e-01, -4.7157e-02, 1.4142e+00, 6.5598e-03, - -2.9140e-01, -3.8873e-01, -2.9546e-01, -6.4273e-01, - -2.2064e+00, -3.4162e-01, -6.3695e-01, 1.0664e+00, - 2.8752e-01, 7.0688e-01, 3.2701e-01, 1.0046e+00, - -2.1607e-01, 1.8949e-01, 2.9352e-01, 8.5358e-01, - -6.8339e-01, -6.9741e-01, 1.0468e-01, -6.6405e-01, - -1.0986e+00, -3.2579e-01, -7.7232e-01, 6.7494e-01, - -6.2313e-01, 6.2446e-01, -5.8314e-01, -2.0339e+00, - 1.1519e-01, -2.0483e+00, -3.7730e-01, 7.2891e-01, - 2.1642e-01, -5.1623e-02, 3.0418e-01, 3.2266e-01, - -8.6217e-01, 2.3349e-01, 7.9314e-01, 2.2691e+00, - 1.9685e-02, 4.0684e-01, 1.8699e+00, 5.3840e-01, - 1.3162e+00, -3.4350e-01, -1.4511e-01, 2.2258e-01, - 1.0723e+00, 1.0034e+00, 2.8202e-01, 8.0714e-01, - 1.8549e+00, -2.3642e+00, -1.3380e+00, 8.1566e-01, - 2.4338e+00, 1.9122e+00, 1.3140e+00, -2.0712e+00, - 1.7910e+00, 1.2781e+00, -2.1684e-01, 2.1021e-01, - -3.8191e-01, 6.8519e-01, 3.1330e-02, -1.0617e-01, - -1.4492e+00, -7.0918e-02, -7.1993e-01, -1.1881e-01, - 8.9619e-01, 1.0526e+00, 7.0638e-01, 1.7071e+00, - -1.5887e-01, 8.0341e-01, 1.0693e+00, -6.3579e-01, - 2.7956e-01, -1.4041e+00, -5.6642e-01, -5.9750e-01, - 4.1010e-01, 7.2265e-01, 2.2648e-01, -9.3430e-01, - 8.2372e-01, -9.9145e-01, 4.2264e-01, -1.5604e-02, - -1.1972e+00, 7.2259e-01, -1.0648e-01, -4.8521e-01, - -4.5616e-03, 1.2769e+00, 4.3154e-01, -2.9439e+00, - 1.2515e-01, 6.9333e-01, -4.5450e-01, -2.7201e-01, - -1.4440e+00, -3.2356e-01, 1.3073e-01, -1.1688e+00, - 1.3524e-01, -2.0372e-01, 8.2446e-01, -1.0044e+00, - 1.7930e+00, 2.4112e-01, 2.1975e-01, 
2.6068e-01, - -7.4533e-01, 1.3595e+00, -5.2883e-01, -9.8625e-01, - -1.5568e-01, 4.5578e-01, -1.0795e+00, -4.1734e-01, - -8.5339e-01, -1.6401e-01, -6.7863e-01, 1.1639e+00, - -4.0427e-01, 2.9276e-01, -1.4080e+00, -6.1845e-01, - 1.3330e+00, 1.6374e-02, -9.0147e-01, -1.2001e+00, - -1.0131e-01, 8.7924e-01, 1.1872e+00, -3.4299e-02, - -1.5000e-01, -7.0214e-01, -1.0774e+00, -8.0529e-01, - -6.9565e-01, -1.4163e+00, 1.8617e-01, -9.2503e-01, - -1.9544e+00, -4.3471e-01, -5.1463e-01, -9.5551e-02, - 5.9474e-01, -4.1036e-01, -7.6290e-01, 4.0169e-01, - 7.7647e-01, 2.0785e-01, -4.8375e-01, -2.5952e-01, - 5.6776e-01, -6.7107e-01, -7.0460e-01, -6.5613e-01, - 1.2953e+00, 2.2779e-01, 1.3147e-01, -1.7126e+00, - -7.3497e-01, -1.8652e+00, 1.3496e-01, 1.0975e-01, - -3.9361e-03, -2.2614e+00, 1.9451e+00, 1.2009e-01, - -5.3850e-01, 2.0398e+00, 1.3517e+00, 4.8214e-01, - 2.3948e+00, 1.6262e+00, -6.7120e-01, -9.1252e-02, - -5.6856e-01, -3.0926e-01, 6.4070e-01, -1.0963e+00, - 2.0179e+00, -2.8126e-02, 4.2232e-01, -3.1312e-01, - 1.3212e-01, 4.9461e-01, 1.2975e+00, -9.7064e-01, - -3.2489e+00, 2.4659e-01, 2.1334e+00, 3.2083e-01, - 1.7069e+00, 1.1402e+00, 1.6928e+00, 6.4233e-01, - 3.4668e-01, -1.1474e+00, 1.6597e+00, 2.9447e+00, - -9.9190e-02, 1.6647e+00, 4.8903e-01, 2.6406e+00, - -1.5926e+00, -1.6247e-01, -1.9894e+00, 5.5559e-01, - 8.9626e-02, -1.1995e+00, 6.7309e-01, 1.1395e+00, - -5.3494e-01, -2.3402e-01, 1.7088e-01, 1.1974e-01, - 6.0830e-01, 7.4116e-01, -9.1175e-01, -2.8305e+00, - -4.6993e-01, -6.2566e-01, 1.4503e-01, -4.8925e-01, - -7.1859e-01, 6.8666e-01, -5.0471e-01, -1.6635e+00, - 8.6350e-01, -3.2665e-03, 1.9566e-01, 8.8545e-01, - 2.4230e-01, 1.0816e+00, 8.0010e-01, 1.3755e+00, - 5.3929e-01, 1.7633e-01, 2.1499e+00, 1.9138e+00, - 9.7563e-01, 1.2187e+00, -1.2679e+00, 6.6809e-01, - 1.3472e+00, 1.1197e+00, 9.2292e-01, 1.8931e+00, - 1.3408e+00, 1.3865e+00, 5.5745e-02, 1.4854e+00, - 7.2765e-01, 5.9527e-01, 1.5207e+00, 1.8123e+00, - -2.7680e-01, -4.5392e-01, -6.1772e-02, 6.9972e-02, - -2.5296e-01, -7.5858e-01, -1.2379e+00, 2.0047e+00, - 4.0452e-01, 2.0643e-01, -2.4267e+00, -4.1213e-01, - 1.4337e+00, 5.1903e-01, 5.2186e-01, 1.2291e+00, - -7.7268e-01, -2.9521e+00, 4.7012e-01, -2.7846e-01, - -3.7091e-01, -9.6630e-01, -2.3178e-01, 4.4828e-01, - 8.0763e-01, -5.5254e-01, 3.8266e-01, 1.5772e-01, - -1.7076e+00, -7.1790e-01, 6.7222e-01, -1.9075e-01, - 5.4430e-01, 6.0005e-02, -1.3901e-01, 4.7989e-02, - 6.5414e-01, -2.0814e-01, -2.3934e+00, 8.0663e-01, - 1.5188e+00, 5.7357e-01, -2.0704e-01, -7.8699e-01, - 3.8495e-01, -1.3372e+00, 5.0254e-01, -7.7022e-01, - -7.5301e-01, 2.5725e+00, -1.4389e+00, -2.4912e-01, - 2.0878e+00, -1.4561e-01, 9.1726e-01, -9.0215e-01, - 6.0206e-01, -4.6242e-01, 2.1502e+00, 2.7872e-02, - -9.2343e-01, -4.9310e-02, -1.3370e+00, -1.2587e+00, - 1.4432e+00, 1.7337e+00, -9.6124e-01, 8.3853e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.6363, 0.3842, 0.2876, ..., 0.1443, 0.4194, 0.8396]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 0.027333974838256836 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 384137 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.492179870605469} 
- -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 222, 7437, 7497, 2651, 7024, 4294, 9531, 6036, 4141, - 5259, 5882, 3456, 8920, 4694, 6915, 787, 9584, 2296, - 9098, 7651, 291, 1621, 1907, 2250, 2504, 5598, 8628, - 5279, 6883, 8331, 9762, 2269, 5148, 2914, 8285, 8913, - 7125, 3459, 213, 4146, 2395, 3210, 871, 2655, 6263, - 8364, 5969, 9795, 3554, 4540, 7138, 5899, 7013, 4013, - 5644, 9466, 2600, 6816, 5104, 1032, 1378, 2305, 76, - 6297, 6291, 137, 4965, 2824, 6800, 5148, 2743, 437, - 8685, 1012, 9755, 5654, 9260, 5677, 7338, 6326, 4500, - 313, 4535, 3617, 4221, 8117, 456, 113, 4432, 2399, - 9668, 8888, 9514, 7949, 1207, 6399, 5386, 9056, 6998, - 8528, 9992, 9784, 7541, 2605, 7481, 7390, 5616, 4777, - 8181, 6407, 5518, 738, 3418, 2407, 1241, 7351, 6300, - 2987, 4952, 7314, 5236, 4044, 5667, 3826, 5188, 5384, - 3216, 9761, 2233, 152, 6801, 39, 1685, 3892, 184, - 7744, 373, 1603, 8614, 6972, 1829, 8481, 1889, 6970, - 0, 6963, 1892, 354, 4479, 366, 6535, 4580, 7981, - 3708, 2805, 5543, 764, 7732, 6167, 4985, 9661, 7157, - 1347, 7167, 5278, 9744, 5648, 6378, 822, 3826, 9155, - 3077, 6608, 4210, 9679, 9422, 284, 2244, 2215, 7638, - 2821, 8490, 7671, 2322, 8615, 924, 4420, 4911, 421, - 7496, 8343, 9406, 9696, 8729, 213, 7044, 6150, 6562, - 8308, 6954, 8499, 3948, 4345, 9095, 3154, 2014, 3816, - 8521, 1875, 885, 488, 8942, 5736, 1458, 5982, 8937, - 9004, 2842, 403, 671, 3277, 6426, 6911, 4856, 6079, - 1085, 4480, 6965, 485, 6144, 9804, 9653, 4175, 9638, - 9184, 5270, 822, 96, 9560, 9085, 820, 9842, 6370, - 3744, 9063, 3356, 7270, 5608, 9859, 8136, 1103, 8699, - 6340, 1715, 1873, 100, 2146, 5935, 4809, 8306, 9585, - 1456, 727, 9963, 3483, 3943, 9438, 684, 4762, 9519, - 7953, 8686, 4519, 4030, 1788, 9362, 806, 3682, 3678, - 8035, 9828, 9423, 1388, 844, 3913, 5824, 8014, 2679, - 9278, 8708, 4811, 4887, 5804, 7981, 2566, 7325, 4480, - 4575, 1146, 4229, 5282, 789, 4412, 4083, 6580, 7161, - 3046, 1646, 2955, 4507, 8883, 8472, 5264, 3456, 1002, - 6169, 6277, 4879, 7880, 7716, 5971, 7981, 1600, 1317, - 1518, 7460, 9883, 8578, 3881, 1353, 106, 3129, 1317, - 8129, 2371, 9601, 5294, 6746, 7619, 3428, 433, 7113, - 6927, 9451, 4064, 7206, 2725, 2007, 1360, 8716, 4930, - 2311, 3649, 6039, 1362, 4891, 6010, 8110, 8722, 2927, - 6111, 1719, 1152, 6173, 3730, 8732, 2213, 1201, 4987, - 9611, 3494, 6095, 3801, 6745, 9791, 4389, 4058, 3157, - 1436, 9717, 5044, 7853, 405, 5926, 2045, 3479, 6890, - 6437, 5433, 1807, 9687, 2801, 5777, 5958, 738, 4394, - 4914, 6229, 3534, 6036, 1360, 8042, 3536, 4665, 2654, - 998, 5392, 1789, 6381, 5732, 2202, 9840, 4908, 9223, - 431, 852, 5567, 1269, 7016, 7367, 8771, 4162, 982, - 660, 8662, 8433, 8701, 7322, 133, 2968, 9424, 6222, - 7472, 6766, 727, 3945, 3914, 174, 5636, 4132, 6740, - 8342, 1518, 6942, 1957, 3375, 7302, 8526, 3988, 9636, - 8656, 2736, 7854, 9981, 2481, 6429, 2352, 9983, 336, - 9452, 9973, 6283, 2170, 8477, 1014, 8978, 383, 2747, - 4796, 3281, 6063, 5165, 523, 1550, 8316, 9618, 4347, - 7691, 640, 9983, 4814, 4917, 8648, 8641, 6960, 8654, - 8194, 9946, 6501, 2461, 6870, 522, 7166, 8522, 5378, - 7783, 6437, 3761, 
6848, 487, 7834, 2707, 4573, 3150, - 1886, 7395, 603, 4, 6882, 4344, 9764, 2615, 4291, - 5597, 7800, 7068, 7767, 2052, 1527, 5121, 5567, 2558, - 719, 7109, 30, 8678, 7510, 6713, 13, 7263, 2526, - 8720, 797, 5505, 8284, 5262, 3860, 4131, 1529, 3744, - 6691, 8568, 3353, 3782, 9745, 2451, 9603, 9, 5014, - 1675, 326, 4831, 5058, 744, 1917, 9151, 384, 5308, - 4521, 1113, 5201, 2962, 9240, 4854, 2565, 7185, 4803, - 6516, 9426, 5468, 8916, 3947, 1129, 3384, 5955, 7305, - 1762, 2182, 1279, 7661, 7618, 8024, 6416, 2112, 294, - 5586, 8286, 2019, 8064, 3822, 3166, 674, 5115, 4579, - 7585, 6481, 6934, 2310, 2745, 7664, 9228, 2344, 1082, - 5900, 4241, 5198, 6902, 4122, 9053, 9395, 2357, 4177, - 5319, 6862, 8689, 8444, 2118, 7692, 3230, 1099, 555, - 6999, 8510, 3104, 7118, 474, 2001, 7640, 3772, 7923, - 2485, 2732, 4338, 3832, 953, 8582, 7676, 1649, 5816, - 1979, 5792, 1454, 7466, 9808, 7586, 2131, 219, 8012, - 4155, 9173, 690, 7679, 6230, 624, 4461, 830, 7491, - 1009, 2711, 9937, 3312, 3984, 1862, 2976, 9539, 9677, - 6908, 3951, 8422, 6822, 5448, 4324, 1742, 7025, 5881, - 7228, 8637, 3676, 3949, 1876, 2384, 533, 2297, 7040, - 6986, 5846, 2125, 7194, 2440, 8887, 5073, 4721, 6795, - 6631, 5185, 4757, 3106, 7527, 6654, 7428, 2184, 6139, - 2354, 5957, 4694, 5845, 2848, 6815, 4491, 7543, 3612, - 33, 4905, 8295, 7962, 2667, 7700, 3045, 4200, 6082, - 2996, 5730, 2940, 1339, 284, 7840, 6429, 5386, 7536, - 8828, 8024, 9285, 7972, 8705, 4694, 2763, 4111, 1331, - 7371, 9626, 1221, 2492, 8884, 3175, 7236, 9347, 6659, - 5183, 7813, 724, 8487, 570, 2265, 9427, 4563, 4, - 855, 926, 9210, 5071, 1765, 6449, 262, 2289, 2009, - 5213, 3833, 5764, 3762, 4796, 9023, 8666, 4774, 5447, - 1792, 9598, 3991, 6524, 1703, 5378, 2132, 9463, 1525, - 9912, 2341, 2422, 9830, 2456, 9455, 6569, 6288, 9607, - 2583, 2674, 4815, 7048, 6071, 9099, 3085, 4795, 9485, - 265, 1104, 6469, 3448, 2198, 4553, 3964, 5243, 5900, - 9885, 361, 7471, 9338, 4102, 9489, 6116, 3870, 1652, - 3479, 7537, 1442, 7516, 2564, 1325, 6853, 8330, 3144, - 4582, 3183, 112, 6074, 4334, 1811, 232, 3696, 9197, - 4391, 164, 9013, 779, 5752, 6552, 6137, 9656, 6883, - 9408, 7842, 9005, 1689, 9224, 2873, 1705, 619, 9308, - 8984, 3367, 1748, 2108, 9815, 890, 3123, 5600, 6373, - 7223, 8469, 7558, 5755, 5373, 4408, 1989, 1156, 7851, - 6802, 3290, 5585, 5879, 6882, 8201, 9509, 1994, 5556, - 6686, 7049, 678, 9442, 4523, 6139, 8990, 9732, 19, - 1104, 3037, 7165, 5459, 9425, 7753, 1446, 8297, 2251, - 8747, 2440, 9840, 5007, 8673, 7239, 1132, 8996, 4155, - 5007, 3985, 852, 2669, 2659, 490, 2080, 5294, 9538, - 2116, 9085, 5553, 9478, 2984, 2355, 8503, 2352, 3693, - 898, 2012, 9680, 3339, 2265, 8216, 4379, 3517, 2532, - 9350, 8375, 7445, 9646, 2565, 8696, 6785, 3904, 3637, - 7125, 4203, 3936, 5543, 7887, 8251, 8692, 2036, 3103, - 3640, 7586, 321, 9689, 3331, 3668, 425, 489, 2228, - 5992, 5609, 2293, 8178, 2969, 814, 7785, 6106, 6281, - 8942, 9191, 5316, 4749, 5776, 6149, 2814, 6739, 7100, - 4446, 8446, 314, 7890, 9674, 949, 1282, 9438, 7043, - 9270, 860, 6501, 8637, 5055, 2271, 3598, 5933, 5235, - 7643]), - values=tensor([-9.2308e-01, 3.9190e-01, 4.6147e-01, -1.5243e+00, - 8.8435e-01, 8.7643e-01, 8.6370e-02, -9.5048e-01, - 6.6148e-01, -2.2286e-01, -2.1210e-01, 1.0631e+00, - -1.1856e+00, 1.3237e+00, -7.3500e-01, 7.2881e-01, - 9.5345e-01, 8.5490e-01, -1.4805e+00, 3.8884e-01, - 1.0398e+00, -3.1729e-01, 7.4303e-01, -1.4616e+00, - -7.4354e-01, -9.5786e-01, -5.0256e-01, 3.8874e-01, - 8.7615e-01, -3.8267e-01, -2.3812e-01, 2.1738e-02, - -6.3561e-01, 8.9548e-01, 6.7787e-01, -8.9575e-01, - 
-6.1875e-02, -7.4814e-01, -4.4408e-01, 1.8475e-01, - 2.0256e-01, 8.6417e-01, 6.3708e-01, 8.2276e-01, - -9.2857e-01, 6.1846e-01, 5.4497e-02, -9.5464e-02, - 1.2219e-01, 4.4266e-01, -3.3395e-01, 6.1172e-01, - 1.0192e+00, 8.2553e-01, -1.6921e-01, 1.0993e-01, - 3.2796e-01, 1.1826e+00, 6.1254e-01, -2.0309e+00, - -1.0142e+00, -1.2415e-01, -2.9581e-01, 1.3233e+00, - 6.0525e-01, 8.8303e-01, -9.1170e-01, -1.2229e-01, - 6.4896e-01, -8.7184e-01, -1.5883e+00, -1.0555e-01, - -1.5801e+00, 1.9232e-01, -2.1416e-01, 9.1762e-01, - 4.9437e-01, -2.9085e+00, 1.4670e+00, -6.7523e-01, - -5.9541e-01, 1.4941e+00, 1.1518e+00, -5.0889e-01, - 8.6347e-01, 1.0362e+00, 1.0699e+00, -4.9015e-01, - -4.1805e-01, 2.4522e-01, -1.3462e+00, 4.5599e-01, - -6.0255e-01, -7.7498e-01, 5.1476e-01, 8.6688e-02, - 1.3810e+00, 2.4392e-01, -8.1518e-01, -7.0849e-01, - -1.1402e+00, 1.6862e-01, -2.3330e-02, 4.8937e-01, - 6.3702e-02, -1.5627e+00, 5.5834e-01, -9.1983e-01, - 1.5184e+00, 1.0502e+00, -8.6476e-01, -6.3217e-01, - -1.0941e+00, -1.2324e+00, -1.8643e+00, 1.7384e+00, - -2.2322e+00, -1.3759e+00, -6.7691e-03, -1.1450e+00, - -2.5912e-01, 5.2269e-01, 1.4332e+00, 2.1224e+00, - 7.2591e-02, 8.5723e-01, 2.1385e-01, -2.8271e-01, - 9.5505e-02, -2.9837e-01, 3.9834e-02, 3.5296e-01, - 9.8691e-01, -4.8036e-01, -4.2731e-01, -6.0840e-01, - -7.7277e-01, 9.6658e-01, 1.8818e+00, 1.3455e+00, - -2.2085e-02, 1.1734e+00, -2.8834e-01, -4.7535e-01, - -1.2872e+00, -1.4206e+00, -9.0742e-01, -1.2156e+00, - 9.2424e-01, 1.2747e+00, -1.3423e+00, 1.1057e+00, - 5.5999e-01, -7.9548e-01, -1.0512e+00, -1.3689e+00, - -1.2810e+00, -1.5709e+00, 6.5795e-01, -2.4083e-01, - -1.0035e+00, -7.9954e-01, 9.4141e-01, -9.8110e-01, - -7.4363e-01, -1.9710e-04, -6.2434e-01, -2.4168e-01, - -2.0966e-01, 1.7743e+00, 1.1130e+00, 8.5413e-02, - 7.1515e-01, -1.2740e+00, 5.3621e-01, -9.4976e-01, - 3.8530e-01, -7.0580e-01, -4.2630e-01, 7.4625e-01, - -1.1147e+00, 1.5249e+00, -7.5284e-01, -1.7929e+00, - 5.0779e-01, -2.2783e-02, 7.0551e-01, 5.4506e-01, - 3.6738e-01, -7.3332e-01, 1.7846e+00, -2.2376e+00, - 7.1808e-03, -1.3452e+00, -1.4568e+00, -4.1102e-01, - -2.6547e-01, -3.6465e-01, 3.9283e-02, -2.0897e-01, - -7.0794e-01, 2.2944e+00, 2.1244e-01, 1.6130e+00, - 7.3068e-01, -1.2226e+00, -1.4305e+00, -1.0358e+00, - 1.8651e+00, -8.0607e-01, -2.2131e+00, -8.3005e-01, - 1.6394e-01, -1.0806e+00, 1.6939e+00, -2.2018e-01, - 2.2983e-01, 1.6012e+00, -8.2722e-01, 6.0368e-01, - 2.0081e-02, -2.1384e-01, 5.0059e-01, 1.8201e+00, - 7.1827e-01, -4.3786e-01, 6.0990e-02, -6.8157e-01, - -3.2614e-01, 4.3492e-01, 1.4770e+00, 5.5508e-01, - -5.5228e-01, -9.9538e-01, 1.4518e+00, -2.1732e-01, - -5.2129e-01, 2.2839e-01, 2.5065e-01, 1.5576e+00, - 8.3294e-03, 6.0393e-02, -7.6758e-02, 1.2932e+00, - 4.6032e-01, 1.2215e-01, -1.8953e+00, 1.4964e+00, - 1.1704e-03, 1.0478e-01, -6.3642e-01, 2.8263e-01, - -9.2815e-01, 4.4299e-01, 5.6913e-02, 5.6315e-02, - -3.8207e-01, -6.2028e-01, 8.9305e-01, -1.6633e+00, - 2.6726e+00, 8.0995e-01, 1.3469e+00, 5.0910e-02, - 6.0359e-01, 7.8988e-01, -8.5580e-02, -3.6119e-01, - 1.6538e+00, -1.5523e+00, -4.8093e-01, -1.0072e+00, - -1.1715e+00, 3.5961e-01, 2.4570e+00, -1.6814e+00, - 8.3141e-01, 4.0549e-01, 6.9588e-01, 2.6609e+00, - -8.9285e-01, -2.8212e+00, 3.4359e-01, -1.4809e-01, - 2.0822e+00, -5.1056e-01, -1.0414e+00, 8.1470e-01, - -4.7150e-01, 1.0228e+00, -6.6643e-01, -1.0700e+00, - -6.5295e-01, 1.7645e+00, -1.3508e+00, 1.7110e-01, - -1.1082e-01, 9.7554e-01, -9.0538e-01, -4.0931e-01, - 8.4945e-01, 1.2745e+00, -3.4857e-01, -2.3241e-01, - -3.3331e-01, 4.8169e-01, -9.3358e-01, 5.3797e-01, - -2.5148e-01, 
1.3383e+00, 5.6388e-01, -6.9501e-01, - -1.1310e+00, 1.9994e+00, 7.4035e-01, -2.5915e-01, - -8.1669e-01, 3.1080e-01, -1.2149e+00, 7.7269e-01, - 8.5924e-01, -1.2092e+00, -7.4760e-01, -7.6193e-01, - 5.7806e-02, 1.4560e+00, 5.7550e-01, 2.0002e+00, - -1.9819e+00, -1.2746e+00, -2.8615e-01, 2.1939e-01, - 3.9049e-01, -5.9689e-02, 1.7802e-01, 2.3999e-01, - 1.0994e+00, 2.4875e+00, -1.5412e+00, -4.7932e-01, - -9.3811e-01, 4.7964e-01, 4.0463e-01, -3.4804e-01, - -7.6732e-01, 5.8697e-01, 1.7141e+00, -3.6710e-01, - 1.3014e+00, -5.6319e-01, 1.4497e-01, 1.7775e-01, - 4.3781e-01, -8.7782e-01, 4.6982e-01, -9.7505e-01, - 1.1560e+00, 5.9976e-01, 7.4817e-02, 1.3433e-01, - -1.9760e+00, -2.7692e-01, -1.3873e-01, 2.3904e-01, - -1.1655e+00, -1.1487e+00, -1.8715e+00, -1.0689e-01, - 1.7622e-02, 8.2422e-02, -2.0288e+00, -7.1919e-01, - -6.0561e-01, 9.3375e-01, -3.9688e-01, 6.6401e-02, - 5.6909e-01, -6.3784e-01, -1.3337e+00, -1.0495e+00, - -5.1682e-01, -2.8169e-01, -9.0957e-02, 2.5883e-01, - 1.3805e-01, -2.1052e-01, -3.2110e-01, 2.0761e+00, - 1.8323e+00, -8.8117e-02, -5.3948e-01, 4.4215e-01, - 1.4409e+00, 2.4265e-01, -9.2819e-02, -6.4594e-03, - 1.0671e+00, 1.7354e+00, 1.8838e+00, 3.6698e-02, - 1.3602e-01, -3.5024e-01, 1.3839e-01, -6.2042e-01, - -1.0561e+00, -8.3556e-01, 8.6439e-01, 8.7880e-01, - 8.3209e-01, 3.8549e-01, 1.5120e+00, -1.4269e+00, - 1.5956e+00, -2.1098e+00, 7.0479e-02, 4.9980e-01, - -8.0969e-01, 1.1193e+00, 5.4612e-01, -3.6869e-01, - -1.6549e-01, -1.0239e+00, -6.4093e-02, 7.9872e-01, - -1.8738e-01, 8.3017e-01, 1.2285e+00, -1.1410e+00, - 5.6377e-01, 1.1095e+00, -6.1059e-01, 1.4741e+00, - -2.6411e-01, 1.0483e+00, -1.0970e+00, 2.9836e-01, - -4.1527e-01, 4.3922e-01, -6.9227e-01, -3.7834e-01, - 7.3327e-02, 6.7747e-01, -8.4458e-01, 1.3610e+00, - 2.4696e-01, 1.0794e-01, -1.5574e+00, -2.3793e-03, - -1.4184e+00, 1.0903e+00, -8.2223e-01, 2.3586e+00, - 7.0845e-02, 4.5798e-01, -1.1603e+00, 1.6127e+00, - 9.2263e-01, 3.1279e+00, 2.2688e-01, -6.9933e-02, - -9.5080e-01, 6.3278e-01, 3.9960e-01, 1.4227e+00, - -1.2125e+00, -6.0355e-01, -1.0767e+00, -1.2560e+00, - -1.0836e+00, -9.6371e-01, -1.4332e+00, 3.6846e-01, - 4.5214e-01, -3.7910e-01, 4.8076e-01, -8.9814e-01, - -1.0856e+00, -3.7464e-01, -5.4679e-01, -3.1363e-03, - -4.7894e-01, 5.1069e-01, 5.5970e-02, 1.4076e+00, - -8.2195e-01, 1.7974e+00, 1.4838e+00, 2.4162e-01, - -1.5833e+00, -1.0972e+00, -9.2334e-01, -1.6747e+00, - 2.4426e-01, -3.9679e-01, 2.2121e-01, 3.3912e-01, - -1.0385e+00, 8.2539e-01, 1.9766e+00, -1.3383e+00, - -1.5256e+00, -1.2689e+00, -4.8765e-01, 7.9891e-01, - 6.7421e-02, 9.6194e-01, -1.9767e-01, 6.4307e-01, - -2.5900e+00, -7.5070e-02, 8.7809e-01, 4.9216e-01, - 7.0668e-01, -4.7312e-01, 6.3624e-01, -2.5760e-01, - -2.5484e-01, -1.8498e+00, 4.1188e-01, 2.0605e-01, - 1.5619e+00, 2.8817e-01, 6.1987e-01, 9.9805e-01, - -1.4598e+00, -8.2982e-01, 1.3080e+00, 1.2244e+00, - -9.6403e-01, -1.2519e-02, -4.8887e-01, -1.4435e+00, - 4.1738e-01, -4.9702e-01, 1.0180e+00, -3.1120e-01, - -3.6660e-01, 4.4461e-01, -8.3801e-01, 1.3617e+00, - 8.3120e-01, -2.3965e-01, -9.6693e-01, 1.7504e+00, - -2.9334e-01, 5.7783e-01, 1.3604e+00, -7.2748e-01, - -1.2782e+00, 5.1983e-01, -1.2631e+00, -5.5356e-01, - 7.1794e-01, -1.1434e+00, -2.1767e+00, -1.2580e+00, - 8.5211e-01, 8.3793e-01, 1.0443e+00, -1.1354e+00, - -8.1820e-01, -1.7038e+00, 1.5721e+00, 1.5399e+00, - -5.4647e-01, -4.9829e-02, -1.8400e-02, 2.3192e+00, - 6.2466e-01, 9.0257e-01, -9.3564e-02, 5.7818e-01, - 9.4875e-01, 1.5456e+00, 8.3523e-03, -8.9098e-01, - 2.5857e-01, 3.1525e-01, -2.6174e+00, 2.4639e-01, - -1.2470e+00, 8.8763e-01, 
1.2092e+00, -3.8374e-01, - 2.4571e-01, 1.2475e-01, 7.2673e-01, -6.6444e-01, - 2.3321e+00, -1.3895e+00, 7.7810e-01, -8.8032e-01, - -4.3382e-02, 6.9891e-01, 1.9737e+00, -1.9049e-01, - 1.3387e+00, -8.4748e-01, -5.2098e-01, -4.2636e-01, - 2.2711e+00, 8.2960e-01, 1.1211e+00, -1.1398e-01, - -2.6238e-01, 1.1514e+00, -1.2983e+00, 6.6281e-02, - -1.7162e+00, -6.7643e-01, -1.7726e+00, 2.3596e+00, - -3.5267e-01, 1.0614e-01, -3.5835e-01, 8.3147e-01, - 6.1239e-01, 8.0734e-01, -1.0080e+00, 9.5074e-01, - -5.5172e-01, 1.5359e+00, 1.4239e+00, 5.7758e-02, - 1.8023e-01, 9.8149e-01, 7.5606e-01, 7.4760e-02, - 2.1639e+00, 1.4254e+00, -2.3543e-01, 2.0504e-01, - -5.6021e-01, -8.9357e-01, -1.0669e+00, 4.7931e-01, - -1.4126e+00, -4.2486e-01, -1.7404e-01, -1.1783e-01, - 1.0376e+00, 7.6731e-01, -1.8069e+00, -7.5564e-01, - 1.8137e+00, -2.6841e-01, -1.5221e+00, 1.7779e+00, - 9.9609e-01, -4.6881e-01, 1.2519e+00, 1.5969e+00, - 3.1877e-01, -1.1493e+00, -1.6495e+00, -2.4727e-01, - -1.0109e+00, 1.1833e+00, -2.1647e-01, 1.7100e+00, - -2.3341e+00, 1.8983e+00, 2.9595e-01, 1.2066e+00, - 3.0804e-01, -2.4516e-01, 1.1600e+00, -1.1721e+00, - -5.1258e-01, 9.1749e-01, 1.3424e-01, 1.2507e+00, - -9.1994e-01, 1.0126e+00, -6.7365e-01, -1.3478e+00, - -1.9358e-01, -5.8859e-01, 7.9299e-01, -6.6933e-01, - 1.8074e+00, -6.0294e-01, 2.8435e+00, 1.2632e+00, - 1.0172e+00, -6.9232e-02, -5.5733e-01, -1.3726e-01, - 1.8919e-01, 8.6067e-01, 1.3801e+00, -1.0515e+00, - 4.9680e-01, 3.3493e-01, 5.6910e-03, 3.6546e-01, - 1.9498e+00, 3.3876e-01, 1.5613e-01, 1.5899e+00, - 6.1153e-01, -1.3484e+00, -1.0071e+00, 9.7018e-01, - -4.5485e-03, 3.4052e+00, 4.1842e-02, 3.4191e-02, - 4.2163e-01, 5.6429e-01, -3.3095e-01, -1.9823e+00, - 3.4409e-01, -3.9496e-01, -3.2065e-01, 1.1268e+00, - 5.1818e-01, -6.0511e-01, -5.0187e-01, -7.9695e-02, - -6.8073e-03, 1.3501e+00, 2.6785e-01, 8.8758e-01, - -8.7181e-01, -1.7174e+00, 8.6980e-01, 1.7620e+00, - -1.4275e+00, -6.3058e-01, 7.2802e-01, 1.9769e-01, - 9.0484e-01, -2.4758e-01, -4.5294e-01, -2.4297e-01, - -5.4630e-01, -6.7723e-02, 1.0947e+00, -1.0740e+00, - -2.8676e-01, -1.4573e+00, -1.2242e+00, -4.3670e-01, - -1.4930e-02, -1.8159e+00, 9.7545e-02, 1.5692e-01, - -2.9113e-01, 2.2439e-01, -1.9719e+00, -1.0255e+00, - 4.6983e-01, 1.0289e+00, 2.0381e-01, 1.1241e+00, - -1.0298e+00, 6.9126e-01, 1.3663e+00, 5.8548e-01, - 2.8719e-01, 1.5788e-01, 6.0533e-01, 7.6163e-01, - 2.6252e-02, 1.1224e+00, 1.0702e+00, 3.6492e-01, - 6.5048e-01, -2.8061e+00, -4.9540e-01, -2.4496e+00, - 1.2840e+00, 5.6031e-01, 1.0611e+00, -1.7643e-01, - -2.9870e-01, 1.8292e+00, -9.7444e-01, 6.4294e-01, - -4.4184e-01, 1.6495e+00, 8.4507e-01, 2.8115e+00, - -4.9231e-01, -2.2196e-01, 1.7085e-03, -2.2362e+00, - 6.9399e-02, 7.0683e-01, -4.6486e-01, 4.9054e-01, - -2.6745e-01, -7.2107e-02, 2.2297e+00, -8.6260e-01, - 4.1607e-01, -6.8105e-01, 3.4527e-01, 1.0790e+00, - -8.9867e-01, -1.1843e-01, 4.2536e-01, -9.1061e-02, - 5.7913e-01, 1.1831e-01, 9.1385e-01, -1.0097e-01, - 5.1024e-01, -7.3128e-01, 4.7572e-01, 5.4390e-01, - -5.2805e-01, -4.3437e-01, -3.2783e-01, 8.2348e-01, - -6.0431e-01, -1.7446e-01, 8.1916e-01, -1.5495e+00, - 1.1487e+00, -6.7907e-02, -1.0785e+00, -6.6431e-01, - 2.7317e+00, 6.9948e-01, -3.9939e-02, -1.2845e+00, - -6.8966e-01, -1.2464e+00, 5.3664e-01, -4.4391e-01, - 2.8727e-01, 1.5617e-01, 3.5918e-01, -8.9393e-01, - -4.2595e-01, 3.0267e-01, 8.7252e-01, 9.3551e-01, - -1.8582e-01, 2.8078e-01, 2.2729e-01, 1.1572e-01, - 1.9407e+00, -3.5872e-01, 2.2790e+00, -1.8550e-01, - 1.1591e+00, -2.2087e+00, -1.2969e+00, -4.2126e-01, - 2.0327e+00, 1.6373e+00, -5.6060e-01, -1.9648e-01, - 
-2.2031e-01, 2.4133e-01, -9.7083e-01, 5.0378e-01, - -3.6136e-01, 7.3410e-01, -2.3676e-03, -4.5842e-01, - 8.8199e-01, -1.3959e+00, -1.5434e+00, -1.5357e-01, - 1.4332e+00, -1.0869e+00, -7.5001e-01, -2.7957e-01, - -2.2581e-01, -2.2605e-01, 9.8498e-01, 2.3781e-01, - -9.5479e-01, -1.0322e+00, 5.0765e-01, -2.7704e-03, - -1.5530e-01, 1.6642e+00, 8.5327e-01, -7.9324e-01, - -3.9517e-01, -1.1159e+00, -5.1454e-01, 6.8049e-01, - -3.9313e-01, -3.9525e-01, 5.6955e-02, 4.7978e-01, - 1.2852e+00, 2.0234e+00, -1.3945e+00, 5.1137e-01, - 2.0118e+00, -1.0376e+00, -9.7502e-01, 3.8129e-01, - 1.3055e-01, -8.5262e-02, 5.3578e-01, 2.2649e-01, - -4.0689e-01, 2.1538e-01, 9.0363e-01, 3.2339e-01, - -6.5027e-01, 9.7910e-02, 2.7742e-01, -2.2149e+00, - -8.2605e-01, 8.5492e-01, -1.7839e-01, -4.4041e-01, - -1.8058e+00, -2.2123e+00, 5.6262e-01, 1.5871e+00, - 1.7581e+00, -3.8752e-01, 2.3891e-01, -3.1783e-01, - -2.2437e+00, -8.2439e-02, 9.8071e-04, -1.8987e+00, - -2.0817e-01, -8.3570e-02, -1.6037e+00, 2.2163e+00, - -1.4126e+00, -9.9244e-01, -9.0841e-01, -1.1223e+00, - 1.3394e+00, 1.5321e+00, 2.4150e-01, 6.2020e-01, - 7.5968e-01, -1.3688e-01, 7.8251e-01, -3.0338e-01, - 1.7261e+00, 1.3440e+00, 1.1799e+00, 1.2787e+00, - -1.2860e+00, -1.4121e-01, 3.9634e-01, 1.3659e+00, - 1.7980e-01, -1.5247e+00, -6.2077e-01, -4.8023e-01, - 1.9832e-01, 1.8391e-01, -2.1843e+00, -3.6737e+00, - 4.4713e-01, 1.2023e+00, -1.7265e-01, -1.0576e+00, - -7.1539e-01, 5.7103e-01, -9.8519e-01, 1.4610e-01, - 1.9980e-01, -7.5910e-01, -1.1646e-01, 6.9658e-01, - -9.2715e-01, 5.8373e-01, -9.1087e-01, -8.5754e-01, - -2.3282e-01, 1.2754e+00, -9.8724e-01, -2.4305e-01, - 5.0300e-02, 4.6731e-01, -1.4429e+00, -1.3015e+00, - 2.5617e-01, 2.3793e-01, 1.7810e-01, -3.5815e-01, - 2.7962e-02, 1.4784e+00, 1.9412e-01, -2.0901e-01, - -1.8121e-01, 1.3716e+00, 5.6026e-01, 2.5568e+00, - -1.4505e-01, 1.4079e+00, -8.9658e-01, 9.1084e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8947, 0.0284, 0.0077, ..., 0.6573, 0.1699, 0.6198]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 9.492179870605469 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 424922 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.511717319488525} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), - col_indices=tensor([7108, 2220, 704, 4717, 5102, 2170, 5932, 5251, 5529, - 3902, 6102, 6905, 6279, 3643, 9310, 6829, 1180, 3607, - 6587, 1134, 1919, 479, 6501, 8277, 3375, 7202, 3069, - 1170, 6504, 1553, 5984, 204, 4506, 3565, 5764, 4811, - 5011, 8721, 3327, 1434, 6705, 7101, 2604, 8374, 4930, - 467, 3859, 3129, 2811, 5462, 5535, 1210, 7854, 800, - 2832, 8802, 2528, 9725, 3293, 1524, 501, 4374, 845, - 5331, 4616, 5781, 3967, 8494, 2592, 132, 2896, 6970, - 7494, 6070, 1304, 7729, 1651, 2600, 1766, 3537, 928, - 8434, 6493, 7622, 325, 4438, 2164, 3694, 4800, 162, - 4394, 8726, 1546, 6860, 2735, 9497, 7834, 1130, 3531, - 5046, 1451, 4821, 9046, 6886, 3403, 6440, 5905, 505, - 2149, 7243, 9311, 8746, 4756, 2312, 8837, 8885, 3196, - 4096, 3853, 5671, 1756, 2320, 3087, 3499, 4220, 883, - 2423, 6275, 7322, 8487, 7157, 50, 8087, 4426, 7346, - 8369, 948, 4030, 9024, 5101, 3298, 1287, 6551, 5394, - 4195, 1444, 9216, 2448, 1685, 747, 6093, 1098, 2255, - 5136, 2693, 7477, 2833, 6388, 524, 7868, 6532, 4649, - 4095, 7953, 6844, 2980, 7317, 9765, 254, 3356, 9746, - 1260, 1679, 4888, 1282, 5761, 9971, 3597, 843, 991, - 7753, 6460, 8121, 8035, 2627, 5074, 8950, 8865, 901, - 6473, 3823, 54, 4022, 1737, 1587, 3026, 3415, 7311, - 9836, 7719, 8470, 7167, 8040, 6645, 1982, 7476, 1454, - 2111, 1328, 7301, 2182, 6945, 5890, 7032, 8350, 2394, - 498, 4791, 3797, 2189, 2418, 2140, 1024, 7447, 6252, - 3649, 9694, 5920, 434, 268, 9032, 1728, 7146, 3176, - 2804, 8852, 9193, 466, 1700, 9969, 5209, 5700, 6361, - 7035, 4764, 7695, 5288, 1863, 5050, 3389, 2003, 2176, - 8480, 7211, 1968, 892, 9473, 6842, 4655, 6630, 2156, - 8482, 1011, 4467, 2778, 5507, 4025, 4507, 168, 2998, - 9663, 6903, 6132, 951, 7987, 8316, 9690, 539, 5211, - 4919, 4293, 3580, 4335, 6561, 4438, 8053, 6255, 7660, - 9896, 400, 7120, 1642, 58, 3687, 2069, 9295, 7664, - 6885, 1418, 3520, 6406, 4175, 1574, 8905, 2549, 3173, - 6264, 5964, 6686, 2660, 3023, 7713, 9056, 2880, 3833, - 3848, 4364, 3412, 7713, 2112, 4280, 9873, 1520, 2652, - 2091, 247, 8489, 470, 1353, 3141, 2949, 8417, 3599, - 2312, 9750, 7946, 2868, 7191, 6638, 3145, 1191, 4226, - 3508, 7416, 2811, 7827, 5492, 5692, 8264, 1955, 7103, - 5689, 9405, 9457, 9608, 7689, 4813, 737, 3303, 9197, - 2692, 5580, 2670, 7134, 444, 925, 8510, 5745, 3719, - 9842, 9475, 9659, 4818, 6295, 3416, 6378, 5567, 7608, - 2184, 1343, 5034, 1245, 1939, 9404, 6095, 3587, 8626, - 6297, 8908, 2020, 2628, 4180, 1484, 456, 4290, 1489, - 8894, 5034, 7659, 2567, 9975, 7766, 8637, 6671, 2460, - 9262, 4868, 2751, 8920, 4246, 3805, 6541, 7892, 4806, - 8053, 9615, 1710, 6807, 594, 2367, 6505, 2601, 7978, - 8431, 8982, 6578, 7503, 153, 4929, 1513, 9756, 6323, - 2184, 8191, 6874, 284, 5184, 3912, 2415, 3657, 473, - 1952, 9812, 1591, 2591, 2622, 1522, 7116, 1588, 8908, - 1558, 5282, 5494, 8131, 4139, 5385, 4982, 9491, 8237, - 3669, 7663, 4841, 6197, 8135, 3214, 289, 2603, 2719, - 4848, 8098, 9552, 4692, 7971, 6435, 1114, 8403, 6544, - 2106, 1434, 8748, 6793, 6162, 6427, 5646, 658, 2724, - 6422, 2779, 8816, 8893, 4064, 6004, 1106, 9168, 3711, - 3339, 3324, 2776, 9908, 288, 3667, 3385, 4656, 9509, - 9325, 7747, 979, 9616, 5935, 7821, 3411, 1823, 9109, - 2907, 8611, 6882, 2442, 6531, 5138, 3482, 6213, 2527, - 600, 2589, 8766, 7230, 451, 9956, 1481, 8058, 6627, - 7201, 7503, 6300, 7159, 3217, 7756, 6336, 851, 1794, - 2662, 9745, 6596, 9557, 8321, 986, 1770, 3997, 5534, - 4202, 9, 7231, 6831, 9932, 9490, 7309, 102, 5877, - 
7127, 3576, 7057, 5341, 5432, 1187, 3240, 7899, 3396, - 2072, 2721, 2481, 2800, 7736, 8619, 8664, 8853, 5723, - 6616, 3564, 673, 9638, 8341, 5526, 6241, 6996, 5420, - 6654, 3115, 1298, 253, 4414, 7367, 1534, 3837, 9761, - 428, 8103, 5835, 9754, 6827, 2175, 9964, 7434, 561, - 4279, 6776, 441, 8263, 7355, 5298, 8660, 318, 6950, - 6637, 1488, 8710, 1388, 597, 9561, 241, 134, 3711, - 8361, 9297, 7828, 9783, 7892, 287, 9912, 8079, 2899, - 4022, 7707, 5947, 5287, 165, 2592, 5367, 4023, 2061, - 3338, 7729, 8326, 3033, 9768, 3315, 6006, 7582, 9247, - 623, 8878, 9343, 1795, 3348, 1945, 509, 8049, 9916, - 566, 5212, 3790, 8879, 7213, 2649, 5876, 4653, 3684, - 335, 760, 2959, 4736, 51, 4733, 3016, 9095, 7089, - 7197, 7691, 5784, 6059, 5358, 2754, 883, 667, 6646, - 1252, 6475, 7562, 8120, 8189, 3101, 7819, 4174, 1386, - 3387, 5808, 3886, 8012, 2265, 8917, 2690, 6516, 6697, - 3628, 3103, 4110, 2348, 6901, 5464, 7106, 7725, 4381, - 7112, 8776, 2146, 9479, 5359, 1123, 2067, 8702, 7671, - 8386, 4122, 7930, 2070, 8473, 5177, 2473, 1155, 8901, - 8573, 640, 1510, 5361, 3548, 6713, 5575, 9393, 9190, - 1459, 4051, 6013, 6290, 7629, 4952, 744, 8264, 7897, - 7913, 4026, 1724, 7599, 4403, 1391, 6776, 3625, 3335, - 7715, 9959, 1038, 4939, 5709, 7498, 6307, 2236, 6754, - 2258, 7159, 1819, 880, 260, 5840, 5929, 9703, 5870, - 253, 7795, 7093, 3750, 7126, 1717, 6509, 1049, 7347, - 4935, 1625, 3595, 8750, 8432, 53, 905, 1782, 6761, - 6721, 668, 5994, 3888, 3211, 2987, 6123, 788, 6901, - 2437, 2312, 8212, 556, 130, 4010, 1345, 8029, 1308, - 4503, 4807, 9662, 3462, 2757, 5791, 6927, 5781, 1828, - 6619, 3326, 7024, 3608, 2458, 7979, 7543, 2790, 1612, - 1805, 1787, 8794, 9741, 7993, 3704, 9039, 9254, 2600, - 334, 5416, 7480, 7628, 7663, 6884, 1345, 3500, 2501, - 4789, 8659, 1856, 2232, 3695, 5714, 1114, 1603, 258, - 5532, 1628, 1818, 2160, 1905, 3356, 8059, 3136, 1778, - 1248, 8424, 5567, 2850, 9771, 3211, 6986, 8722, 213, - 9766, 882, 3941, 450, 1389, 8247, 3205, 1238, 3324, - 5793, 69, 6088, 7364, 3886, 8969, 1412, 953, 499, - 6629, 2218, 4915, 6257, 2383, 522, 8716, 8699, 1620, - 7817, 8689, 3567, 9254, 3061, 231, 5744, 426, 6833, - 6764, 4302, 4591, 1217, 4510, 1345, 3201, 3722, 7122, - 1855, 281, 8632, 2023, 8209, 5256, 6947, 1447, 2643, - 8259, 1471, 788, 6604, 601, 2972, 8124, 3977, 4028, - 9614, 6822, 2923, 9821, 3255, 1326, 3441, 6852, 8830, - 9806, 9512, 6229, 216, 6212, 5442, 9243, 5741, 84, - 1873, 4975, 3463, 7615, 3711, 3175, 4868, 6387, 4767, - 8951, 3595, 5399, 8876, 2732, 3369, 7303, 58, 5594, - 5541, 8587, 39, 1065, 5345, 9359, 9831, 757, 7524, - 9889, 5863, 3121, 3966, 7854, 264, 3327, 5017, 1001, - 1158, 6014, 3699, 956, 5178, 210, 3051, 8717, 5009, - 7959]), - values=tensor([-1.1267e+00, 9.9958e-01, -3.9930e-01, 6.4552e-01, - 1.1757e+00, 4.0321e-01, -1.4581e-01, -4.8418e-01, - 1.1093e+00, -1.4493e+00, -2.0397e-01, 1.3948e-01, - -7.2084e-01, -1.4625e+00, 6.3078e-01, 1.4585e+00, - -7.6630e-01, -5.0233e-01, -6.9056e-01, 1.2087e-01, - -1.5290e+00, -3.5878e-01, -1.9362e+00, 5.8830e-01, - -7.9054e-02, 5.9895e-01, -5.1073e-02, 3.6354e-01, - -4.4211e-01, 4.4232e-01, 6.2773e-01, -2.9480e-02, - -1.2139e-01, -9.5736e-01, 1.3368e+00, 1.1286e+00, - -3.2338e+00, -1.7408e+00, -4.8078e-01, -3.5725e-01, - -2.0872e+00, -2.4299e-01, -9.9385e-01, -1.1473e+00, - 1.4434e+00, -1.3116e+00, 1.6035e+00, 5.0371e-01, - -4.2531e-01, 8.1825e-01, -8.5244e-02, 1.3817e+00, - 2.1899e-02, 6.3117e-01, 3.1498e-01, -4.6574e-02, - -1.3576e+00, -3.4294e-01, -9.6518e-01, 2.8513e+00, - -1.7599e+00, 1.3326e+00, -4.4524e-01, 5.1345e-01, - 
3.9285e-01, 6.4504e-01, 1.2254e-01, -2.1038e+00, - -2.8071e-01, 1.3569e+00, 4.2690e-01, 3.7668e-01, - 1.6267e+00, -5.4795e-01, 2.0884e-02, -7.1271e-01, - -5.1393e-01, -4.0461e-01, -1.4888e+00, 1.3559e+00, - -8.7715e-01, 5.0365e-01, 5.6218e-01, -1.0504e+00, - 5.4016e-01, 4.3472e-02, 1.1200e+00, -7.1064e-01, - 4.2861e-01, 9.7914e-01, 1.1025e+00, -5.3951e-01, - -1.5139e-01, 7.6996e-01, 1.5436e+00, 3.3483e-01, - -5.9882e-01, -8.5978e-02, -1.8589e+00, -1.4164e+00, - -5.5418e-01, 7.1993e-03, -3.8422e-01, -1.0387e+00, - 1.3088e+00, 1.0944e+00, 5.6801e-01, 1.9888e-01, - -1.2131e+00, 3.0093e-01, 3.5923e-01, -1.1487e-01, - -9.3524e-01, -7.1549e-01, -4.0647e-01, -6.0793e-01, - -9.8182e-01, -3.2825e-01, -1.0166e+00, 5.7350e-01, - -1.3696e+00, 2.8665e-01, 1.7033e+00, -1.0412e+00, - -9.6257e-02, 1.6325e+00, -4.2908e-02, -2.2275e+00, - 1.3168e+00, 1.5920e+00, 2.7449e-01, 2.9711e-01, - -8.7433e-01, 2.0635e-01, 8.7190e-01, -7.0198e-01, - -5.3026e-01, -2.2125e-01, -1.4890e-01, -1.6937e+00, - 2.5218e+00, 3.4151e-01, 1.7715e+00, 9.9308e-01, - -6.1605e-01, 3.4861e-01, -8.6710e-01, 1.1228e+00, - -1.5127e-01, 4.6625e-01, 7.6783e-01, -7.8146e-01, - 2.7244e-01, 7.9931e-01, 7.7499e-01, -3.8151e-02, - 1.6044e-01, -7.6815e-01, 7.4150e-01, -2.0216e+00, - 3.4279e-01, -6.9332e-01, -1.1046e+00, 6.3305e-01, - 9.4908e-01, 7.8525e-01, -3.6906e-01, -8.2064e-01, - 1.1086e+00, 2.5720e-01, 2.0831e-01, -4.0543e-01, - -9.8334e-02, 2.6264e-01, 4.1938e-01, -1.6166e-02, - 3.6583e-02, -5.9790e-01, 1.8471e+00, -6.0387e-01, - 8.7327e-01, -1.2842e+00, 2.8938e-01, 4.5303e-01, - -1.4272e+00, -5.9600e-01, -4.2216e-02, 4.8783e-01, - 9.0039e-01, 2.5473e-01, 8.4128e-02, 2.1929e-01, - -4.8285e-01, -1.3081e-01, 1.9005e-01, -2.5354e-01, - 6.7135e-01, -2.2116e+00, -1.3787e+00, -6.9996e-01, - 1.2621e-01, -2.6132e-01, 1.0058e+00, -6.1614e-01, - -1.0139e+00, -6.4972e-01, -1.2616e-01, 6.7127e-01, - -4.5025e-01, 1.1922e+00, -1.2968e+00, 3.5343e-01, - -1.3843e-01, 2.0874e+00, 6.1470e-01, -3.8680e-01, - -2.8126e-01, -4.0711e-01, 2.3860e+00, -6.5034e-02, - 4.4629e-02, -8.5664e-01, 7.3610e-01, -1.4055e-01, - 1.8281e-02, -1.4813e-01, 1.9067e+00, -2.3178e-01, - -9.5242e-03, 5.0058e-01, 6.8275e-01, -1.6442e+00, - 7.9270e-01, 1.6876e-01, 4.9035e-01, -7.3098e-01, - 1.6857e+00, -1.5385e-02, -1.2309e+00, 2.7868e-02, - -2.5895e-01, -1.3541e+00, -3.1320e-01, 1.3052e+00, - 1.8746e+00, -2.2703e+00, 7.6529e-01, 2.1938e+00, - -8.0849e-01, -2.0685e-01, -1.5279e+00, 2.0311e-01, - 9.1010e-01, 6.5864e-01, -2.4717e-01, 9.3591e-03, - -1.8092e-01, -2.5631e-01, -7.7386e-01, -7.7014e-01, - 1.3536e+00, 1.1345e-01, 8.8867e-01, -6.2931e-01, - -1.0718e+00, 2.5273e-01, -3.6488e-01, 7.1753e-01, - -6.5983e-01, 1.2105e+00, -1.2745e+00, -8.8942e-02, - -1.4474e+00, -1.1304e-01, 2.0534e-01, -1.2666e+00, - -3.8933e-01, 1.6688e+00, 1.7180e-01, -1.7145e+00, - -1.0401e+00, 3.9994e-01, -6.3626e-01, 1.5008e+00, - -6.9937e-01, -2.1368e-03, -3.9993e-01, -3.4445e-01, - -6.1780e-01, 9.9404e-01, 9.7140e-01, 1.2518e+00, - 7.1295e-01, -6.4379e-01, -1.4742e+00, -1.1242e-01, - -2.4549e-01, 5.7863e-01, -1.9438e-01, 8.6387e-01, - -2.9757e-01, 2.0117e+00, -1.4603e+00, -8.9796e-01, - 5.4261e-01, 3.5468e-01, -2.6611e-01, 8.0542e-01, - -1.6641e+00, -1.2072e+00, -4.0468e-01, -2.2144e-01, - -1.3792e+00, -1.4653e+00, 6.7146e-02, -4.4393e-01, - 7.3877e-01, -6.2024e-03, -1.0859e+00, 1.0258e+00, - 1.3175e+00, 1.2756e+00, -1.1664e+00, 6.0869e-01, - 1.0807e+00, -1.2743e+00, 1.7831e-01, -2.1641e+00, - -1.1412e-01, -1.8029e+00, -7.9972e-02, -3.1443e-01, - 1.3506e+00, 2.1213e+00, -1.9520e+00, -1.9620e+00, - 2.2530e-01, 
4.3830e-01, 8.6959e-01, 3.5598e-01, - 4.7856e-01, -7.5741e-01, 2.3955e-01, -7.1372e-01, - -4.5864e-01, -8.5716e-01, 1.1229e+00, 3.6050e-01, - -3.4282e-01, 2.8908e-01, 1.9974e+00, -1.2157e-01, - -9.2857e-01, 3.6541e-01, -2.1761e-01, 6.4334e-02, - -3.4179e-01, -4.6368e-02, -1.7049e+00, -6.1027e-01, - -2.0144e-01, 3.0683e-01, 2.1303e-01, 3.6512e-01, - -8.4555e-01, -7.0046e-01, -1.0592e+00, 3.5977e-01, - -1.7323e+00, -7.4612e-01, 1.0045e+00, 5.3813e-02, - -1.9158e-02, 3.2793e-01, 1.5522e-01, -1.9235e+00, - -3.8306e-01, 9.6631e-01, 1.0257e+00, -6.3863e-01, - -3.0958e-01, -2.9434e-01, -1.1155e-01, 4.2824e-01, - -2.0360e+00, 5.5342e-02, 2.5492e+00, 1.7885e+00, - 4.0732e-01, -5.3381e-03, 8.9006e-01, 1.9158e+00, - 3.1895e-01, 5.2801e-01, -4.7398e-01, 6.1251e-01, - -3.5335e-01, 1.4366e+00, -8.9446e-01, 7.4810e-01, - -9.2632e-02, 1.2390e-01, -7.3278e-01, 2.4728e-01, - 5.3944e-01, -1.9828e+00, 1.2280e+00, -5.2395e-01, - 6.7825e-01, -7.5624e-01, 2.3092e-01, -2.3055e-01, - 5.7026e-01, 9.3626e-02, 6.1179e-01, -1.0464e-01, - -7.4954e-01, -8.1160e-01, 7.2279e-01, 1.0875e+00, - 1.4650e-01, -3.6989e-01, 1.4935e+00, -5.1846e-01, - -1.9360e+00, -3.1383e-02, 1.2420e+00, -2.4575e-02, - 8.0621e-01, 5.4210e-01, -5.3231e-01, 1.4048e+00, - 1.3481e+00, 1.0432e+00, 3.6835e-01, -1.2238e+00, - -4.4686e-02, -1.1742e+00, -7.7776e-03, 1.5963e-01, - 5.3408e-01, -1.3289e+00, -8.0803e-01, -7.9219e-01, - -4.8870e-01, -2.6218e+00, 1.6701e+00, 4.2614e-05, - 4.5417e-01, -3.1126e-01, 1.6318e-01, -5.1281e-01, - 2.7290e-01, 2.3852e-01, 2.8414e-01, 5.7907e-02, - 1.2328e+00, 8.9690e-01, 6.5984e-01, -1.3531e+00, - 1.3553e+00, -6.7554e-01, 2.8924e+00, -3.8556e-01, - 9.6318e-01, -1.3858e+00, -3.9751e-01, -9.8241e-02, - 1.9035e-01, 2.2944e-01, -7.5765e-01, 5.6757e-01, - 1.0232e+00, -3.4510e-01, -9.3572e-01, -1.0157e+00, - -1.4187e+00, 1.2851e-01, -2.2670e-01, 4.6081e-01, - 1.1325e+00, 1.3678e+00, 5.7692e-01, -2.3623e+00, - 1.4895e+00, 2.0900e-01, 2.4488e+00, 5.2715e-02, - -1.5091e+00, -6.1269e-01, 2.0397e+00, -1.6557e+00, - 9.3604e-01, 3.8774e-01, -1.1719e+00, 1.7493e+00, - -9.0466e-01, -8.8712e-01, -1.1755e+00, 4.1774e-01, - 2.0647e+00, 3.6203e-01, 7.1944e-02, -1.6484e+00, - -1.3147e+00, 1.2646e+00, -1.9325e-01, -1.3378e+00, - -1.2077e+00, 7.2163e-01, -1.0747e+00, 8.2830e-01, - 1.1765e+00, -1.1117e+00, -1.1088e+00, 1.2445e+00, - 1.2034e+00, 1.9564e+00, 4.1252e-01, -2.7859e-01, - 7.4551e-01, 5.6915e-01, 8.5772e-01, -4.2962e-03, - -1.1617e+00, 3.2091e-01, 6.1092e-01, 5.6982e-01, - -8.1123e-01, -1.3074e+00, -3.6988e-02, 1.0890e+00, - -3.3022e-01, 5.7173e-01, 1.2934e+00, 6.4620e-01, - 8.0755e-02, 2.2641e-01, 2.9551e-01, 1.0243e+00, - -2.5140e-01, -1.1377e-01, -1.5102e+00, -7.2232e-01, - 3.9729e-01, 3.0677e-01, 4.3487e-01, 2.4759e+00, - 4.8101e-01, -2.0766e-01, 1.0500e+00, 7.4032e-01, - 6.9707e-02, 2.2844e+00, -1.3175e+00, 1.2178e+00, - 4.0456e-01, -2.4854e-01, -1.5566e+00, -1.6633e-01, - -3.2689e-01, -2.1630e+00, 2.1144e-01, -2.1187e-01, - 1.0553e+00, -4.7398e-01, 5.5106e-01, 6.6889e-01, - 3.6349e-01, -3.2650e-01, 3.6386e-01, -1.0836e-04, - -6.3491e-01, 7.2262e-01, -5.0944e-01, -4.2257e-01, - 2.8635e-02, 4.9464e-01, 1.3431e-01, -4.3234e-01, - 8.7360e-01, -1.6526e+00, -1.6500e+00, -4.5130e-01, - -9.5255e-01, -1.4026e-01, -9.5895e-01, 9.4986e-01, - -3.4725e-01, -2.6373e-01, -5.6352e-01, 9.8595e-01, - -7.8576e-01, -8.5672e-01, -8.4422e-01, 1.2291e+00, - -3.7749e-01, -8.3716e-01, -8.0912e-01, 2.2822e+00, - 4.6359e-01, 1.4793e+00, -6.8537e-01, -1.5239e-01, - -7.1455e-01, 3.7699e-01, 7.5125e-01, -2.1301e+00, - 1.1240e+00, 8.0201e-01, -2.3065e+00, 
1.1086e+00, - 1.7327e-01, -2.4006e+00, 5.8133e-01, 1.5551e+00, - -1.0745e+00, -1.0167e+00, 4.8847e-01, -1.4299e+00, - -6.1642e-01, -9.1207e-02, -2.1746e+00, -8.1222e-01, - 3.2331e-01, 2.6657e-01, -6.5705e-02, -1.1855e+00, - 1.6791e+00, -1.1067e+00, -2.1613e-01, -5.4110e-01, - -1.4790e+00, 3.8794e-01, 1.3764e+00, 1.3263e+00, - -2.3916e-01, 2.2738e-01, 3.6634e-01, -8.9084e-01, - -1.2323e-01, -1.3621e+00, -2.8386e-01, -1.4290e+00, - -7.9432e-01, -5.5912e-01, -1.1564e+00, 4.3015e-01, - -1.4916e+00, 1.1167e+00, 1.1563e-01, -5.0441e-01, - -2.9505e-01, 8.4223e-01, 1.0062e+00, 1.4742e+00, - -3.6713e-01, 3.1320e-01, -1.0665e+00, 5.8598e-01, - 1.3278e+00, -9.7937e-01, 2.7199e-01, -2.5446e-01, - 4.9309e-01, -1.2864e+00, 3.1660e-01, 2.4644e+00, - -1.5332e+00, -5.0134e-01, 5.7625e-02, -5.5293e-01, - 3.1004e-01, 5.4177e-01, -2.7351e-01, 4.2005e-01, - 2.4091e-01, -1.2441e+00, -1.0709e+00, 1.7822e+00, - 8.1190e-01, 6.6988e-01, 1.3869e+00, -1.0551e+00, - -3.9592e-01, -3.9359e-01, 3.1190e-01, -2.0555e+00, - 1.0543e+00, 4.8794e-01, -2.5062e-01, 1.1599e+00, - -7.8921e-01, -7.3898e-01, -2.2051e-01, 1.0343e+00, - 9.9336e-01, 1.4121e+00, -1.0680e+00, 1.2419e+00, - 4.9369e-02, 1.2102e+00, 1.1111e+00, 1.1182e+00, - -3.4679e-01, -7.5134e-01, 5.8000e-01, -8.7335e-01, - -8.6145e-01, -6.6027e-01, 9.8669e-01, 1.3038e+00, - -2.0355e+00, 7.6677e-01, -7.3597e-01, -2.1282e-01, - 1.7098e-01, 6.5113e-01, -3.7585e-01, 4.3648e-01, - -3.3096e-01, 1.0013e+00, 3.0245e-01, 8.3589e-02, - 1.6531e+00, -3.9269e-01, 2.2536e+00, -1.6756e+00, - -6.2681e-01, -1.1492e+00, 9.5103e-01, -1.8672e+00, - -4.8700e-01, -1.1202e+00, 4.3629e-01, -1.8129e+00, - 1.5488e+00, -8.6519e-01, -1.1055e-01, -1.2084e+00, - 1.0274e+00, 1.1153e+00, -8.6412e-01, -1.4593e+00, - 1.0950e+00, -1.2805e+00, -5.4514e-01, 9.0279e-02, - 7.6631e-02, 8.9327e-01, 1.6325e+00, 5.4458e-01, - -4.4955e-01, -6.7540e-01, -2.0631e-01, 5.2594e-01, - 6.3364e-02, 7.5805e-01, 1.5051e+00, 3.1998e-01, - -1.3501e+00, 2.2329e-02, -8.5761e-01, 9.1674e-02, - -9.0289e-02, -2.3372e+00, 1.9521e-01, -7.4988e-01, - 2.1691e-01, 6.7169e-01, 4.7404e-01, 1.3214e+00, - -7.4827e-01, -9.5198e-01, 6.8075e-01, 1.1011e+00, - 1.4847e+00, 3.0317e-01, 1.8905e+00, 2.0706e+00, - -4.2235e-02, -3.1811e-01, 6.3626e-01, -1.4095e-01, - 4.0645e-01, 4.7106e-01, 8.5588e-01, 4.1556e-01, - -9.0174e-01, -6.9849e-01, -2.8260e-01, 1.3868e+00, - -8.9030e-01, -7.8324e-01, 8.3402e-01, 1.0210e+00, - 2.3830e-01, 2.1314e+00, 1.4930e-01, -1.5145e-01, - 3.0541e-01, -1.2231e+00, 8.0039e-02, -1.7829e-01, - 1.0454e+00, 2.3048e-01, 8.0772e-02, -7.5490e-01, - 7.6959e-01, 2.9459e-01, 1.9243e+00, 1.9807e-01, - -2.3998e+00, -4.6317e-01, 3.9033e-01, 2.1506e-02, - 1.0376e-01, -1.7278e+00, -1.2808e+00, 1.1586e+00, - -1.0938e+00, 1.1228e+00, 1.0289e+00, 1.4500e+00, - -3.3856e-01, 2.3863e-02, -9.6556e-02, 2.7553e-01, - 4.9822e-01, 1.3587e+00, 6.3876e-01, -7.3612e-01, - -1.2484e+00, 4.9202e-01, -8.7324e-01, 4.7128e-01, - -1.6325e-01, 8.4262e-01, 6.2810e-01, 9.1072e-02, - -6.8368e-01, 5.1082e-01, 4.9404e-02, 4.8629e-01, - -2.4629e+00, 2.7725e-01, -5.0478e-02, 1.0547e-01, - -1.3535e+00, 1.3360e+00, 2.5663e-01, -8.4745e-01, - 4.0864e-01, 9.3690e-01, 9.2998e-01, 6.0752e-01, - 1.6514e+00, -1.4550e+00, -8.5073e-01, -3.3732e-01, - -1.1255e+00, -5.8642e-01, -1.5153e+00, 2.7172e-01, - 1.6280e-01, 3.5330e-02, -4.0081e-01, 4.1629e-01, - -6.0387e-01, -1.5833e-01, -1.0447e-01, -6.7319e-01, - 1.0655e+00, -5.2951e-01, 3.5344e-01, -5.1418e-01, - 2.9477e-01, -2.3069e+00, 2.9380e-01, -9.6063e-02, - -3.7041e-01, -1.1153e+00, 7.8732e-01, 9.1250e-01, - -1.2550e+00, 
-1.2419e+00, 8.9287e-01, -1.1626e+00, - 1.8944e+00, -9.3486e-01, 1.7889e+00, 1.4593e-02, - 1.1929e+00, 4.6717e-01, 2.5026e-01, -7.1843e-01, - 6.7783e-01, -7.1813e-01, 4.0677e-01, -3.8997e-01, - -4.0408e-01, -7.8754e-01, 1.1204e+00, -1.0734e+00, - 1.3668e+00, -9.9504e-01, 4.1913e-01, -3.3019e-01, - -1.2084e+00, 6.7457e-01, -5.9583e-02, -9.1102e-01, - 1.1890e+00, 4.1728e-01, -1.4197e+00, 1.7318e-01, - -8.6549e-01, -9.2566e-03, 4.5611e-01, -3.1321e-01, - -7.0098e-01, -1.2817e+00, 2.3514e-01, -6.9337e-01, - -1.8220e-01, -1.3402e+00, 2.5257e-01, -6.5032e-02, - 7.9468e-02, 2.0058e+00, -8.6056e-01, -2.4988e+00, - 2.0434e-03, -6.4709e-01, -2.6952e-01, 2.5683e+00, - -5.1225e-01, -1.4214e+00, -2.7738e-01, -2.4940e-01, - -6.3407e-01, 5.6016e-01, -1.1201e+00, 1.1526e-01, - 3.9586e-01, -1.2663e+00, 1.2897e-01, -1.2513e+00, - -6.6935e-01, -1.8699e-01, 1.1745e+00, 5.7926e-02, - -4.8682e-01, -5.8512e-02, 3.8640e-01, 2.4479e+00, - -7.7442e-01, -6.6656e-01, -9.3995e-01, 8.7680e-01, - 1.0151e-01, -4.5578e-01, -1.0889e+00, 2.7776e-01, - 4.8723e-01, -1.3868e+00, 1.3061e-01, 9.1692e-01, - 1.2626e+00, -9.0738e-01, -7.4655e-01, 1.3039e+00, - -1.0467e+00, 1.0425e+00, -3.0889e-01, -7.0529e-01, - -9.2769e-01, 5.0140e-01, -7.9478e-01, -5.1331e-01, - 8.8456e-01, 8.2267e-01, 6.2436e-01, -1.2896e+00, - 3.1233e-02, 1.5354e+00, -1.0748e+00, -3.1208e-01, - -4.3868e-02, -5.7822e-01, -1.7090e+00, 2.4373e-01, - -3.4570e-01, 2.1444e+00, 2.3934e-02, -6.5855e-01, - 9.3654e-01, 2.1205e+00, 2.8175e-01, -1.9388e+00]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7786, 0.1109, 0.6260, ..., 0.7374, 0.7261, 0.0829]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.511717319488525 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), - col_indices=tensor([7108, 2220, 704, 4717, 5102, 2170, 5932, 5251, 5529, - 3902, 6102, 6905, 6279, 3643, 9310, 6829, 1180, 3607, - 6587, 1134, 1919, 479, 6501, 8277, 3375, 7202, 3069, - 1170, 6504, 1553, 5984, 204, 4506, 3565, 5764, 4811, - 5011, 8721, 3327, 1434, 6705, 7101, 2604, 8374, 4930, - 467, 3859, 3129, 2811, 5462, 5535, 1210, 7854, 800, - 2832, 8802, 2528, 9725, 3293, 1524, 501, 4374, 845, - 5331, 4616, 5781, 3967, 8494, 2592, 132, 2896, 6970, - 7494, 6070, 1304, 7729, 1651, 2600, 1766, 3537, 928, - 8434, 6493, 7622, 325, 4438, 2164, 3694, 4800, 162, - 4394, 8726, 1546, 6860, 2735, 9497, 7834, 1130, 3531, - 5046, 1451, 4821, 9046, 6886, 3403, 6440, 5905, 505, - 2149, 7243, 9311, 8746, 4756, 2312, 8837, 8885, 3196, - 4096, 3853, 5671, 1756, 2320, 3087, 3499, 4220, 883, - 2423, 6275, 7322, 8487, 7157, 50, 8087, 4426, 7346, - 8369, 948, 4030, 9024, 5101, 3298, 1287, 6551, 5394, - 4195, 1444, 9216, 2448, 1685, 747, 6093, 1098, 2255, - 5136, 2693, 7477, 2833, 6388, 524, 7868, 6532, 4649, - 4095, 7953, 6844, 2980, 7317, 9765, 254, 3356, 9746, - 1260, 1679, 4888, 1282, 5761, 9971, 3597, 843, 991, - 7753, 6460, 8121, 8035, 2627, 5074, 8950, 8865, 901, - 6473, 3823, 54, 4022, 1737, 1587, 3026, 3415, 7311, - 9836, 7719, 8470, 7167, 8040, 6645, 1982, 7476, 1454, - 2111, 1328, 7301, 2182, 6945, 5890, 7032, 8350, 2394, - 498, 4791, 3797, 2189, 2418, 2140, 1024, 7447, 6252, - 3649, 9694, 5920, 434, 268, 9032, 1728, 7146, 3176, - 2804, 8852, 9193, 466, 1700, 9969, 5209, 5700, 6361, - 7035, 4764, 7695, 5288, 1863, 5050, 3389, 2003, 2176, - 8480, 7211, 1968, 892, 9473, 6842, 4655, 6630, 2156, - 8482, 1011, 4467, 2778, 5507, 4025, 4507, 168, 2998, - 9663, 6903, 6132, 951, 7987, 8316, 9690, 539, 5211, - 4919, 4293, 3580, 4335, 6561, 4438, 8053, 6255, 7660, - 9896, 400, 7120, 1642, 58, 3687, 2069, 9295, 7664, - 6885, 1418, 3520, 6406, 4175, 1574, 8905, 2549, 3173, - 6264, 5964, 6686, 2660, 3023, 7713, 9056, 2880, 3833, - 3848, 4364, 3412, 7713, 2112, 4280, 9873, 1520, 2652, - 2091, 247, 8489, 470, 1353, 3141, 2949, 8417, 3599, - 2312, 9750, 7946, 2868, 7191, 6638, 3145, 1191, 4226, - 3508, 7416, 2811, 7827, 5492, 5692, 8264, 1955, 7103, - 5689, 9405, 9457, 9608, 7689, 4813, 737, 3303, 9197, - 2692, 5580, 2670, 7134, 444, 925, 8510, 5745, 3719, - 9842, 9475, 9659, 4818, 6295, 3416, 6378, 5567, 7608, - 2184, 1343, 5034, 1245, 1939, 9404, 6095, 3587, 8626, - 6297, 8908, 2020, 2628, 4180, 1484, 456, 4290, 1489, - 8894, 5034, 7659, 2567, 9975, 7766, 8637, 6671, 2460, - 9262, 4868, 2751, 8920, 4246, 3805, 6541, 7892, 4806, - 8053, 9615, 1710, 6807, 594, 2367, 6505, 2601, 7978, - 8431, 8982, 6578, 7503, 153, 4929, 1513, 9756, 6323, - 2184, 8191, 6874, 284, 5184, 3912, 2415, 3657, 473, - 1952, 9812, 1591, 2591, 2622, 1522, 7116, 1588, 8908, - 1558, 5282, 5494, 8131, 4139, 5385, 4982, 9491, 8237, - 3669, 7663, 4841, 6197, 8135, 3214, 289, 2603, 2719, - 4848, 8098, 9552, 4692, 7971, 6435, 1114, 8403, 6544, - 2106, 1434, 8748, 6793, 6162, 6427, 5646, 658, 2724, - 6422, 2779, 8816, 8893, 4064, 6004, 1106, 9168, 3711, - 3339, 3324, 2776, 9908, 288, 3667, 3385, 4656, 9509, - 9325, 7747, 979, 9616, 5935, 7821, 3411, 1823, 9109, - 2907, 8611, 6882, 2442, 6531, 5138, 3482, 6213, 2527, - 600, 2589, 8766, 7230, 451, 9956, 1481, 8058, 6627, - 7201, 7503, 6300, 7159, 3217, 7756, 6336, 851, 1794, - 2662, 9745, 6596, 9557, 8321, 986, 1770, 3997, 5534, - 4202, 9, 7231, 6831, 9932, 9490, 7309, 102, 5877, - 
7127, 3576, 7057, 5341, 5432, 1187, 3240, 7899, 3396, - 2072, 2721, 2481, 2800, 7736, 8619, 8664, 8853, 5723, - 6616, 3564, 673, 9638, 8341, 5526, 6241, 6996, 5420, - 6654, 3115, 1298, 253, 4414, 7367, 1534, 3837, 9761, - 428, 8103, 5835, 9754, 6827, 2175, 9964, 7434, 561, - 4279, 6776, 441, 8263, 7355, 5298, 8660, 318, 6950, - 6637, 1488, 8710, 1388, 597, 9561, 241, 134, 3711, - 8361, 9297, 7828, 9783, 7892, 287, 9912, 8079, 2899, - 4022, 7707, 5947, 5287, 165, 2592, 5367, 4023, 2061, - 3338, 7729, 8326, 3033, 9768, 3315, 6006, 7582, 9247, - 623, 8878, 9343, 1795, 3348, 1945, 509, 8049, 9916, - 566, 5212, 3790, 8879, 7213, 2649, 5876, 4653, 3684, - 335, 760, 2959, 4736, 51, 4733, 3016, 9095, 7089, - 7197, 7691, 5784, 6059, 5358, 2754, 883, 667, 6646, - 1252, 6475, 7562, 8120, 8189, 3101, 7819, 4174, 1386, - 3387, 5808, 3886, 8012, 2265, 8917, 2690, 6516, 6697, - 3628, 3103, 4110, 2348, 6901, 5464, 7106, 7725, 4381, - 7112, 8776, 2146, 9479, 5359, 1123, 2067, 8702, 7671, - 8386, 4122, 7930, 2070, 8473, 5177, 2473, 1155, 8901, - 8573, 640, 1510, 5361, 3548, 6713, 5575, 9393, 9190, - 1459, 4051, 6013, 6290, 7629, 4952, 744, 8264, 7897, - 7913, 4026, 1724, 7599, 4403, 1391, 6776, 3625, 3335, - 7715, 9959, 1038, 4939, 5709, 7498, 6307, 2236, 6754, - 2258, 7159, 1819, 880, 260, 5840, 5929, 9703, 5870, - 253, 7795, 7093, 3750, 7126, 1717, 6509, 1049, 7347, - 4935, 1625, 3595, 8750, 8432, 53, 905, 1782, 6761, - 6721, 668, 5994, 3888, 3211, 2987, 6123, 788, 6901, - 2437, 2312, 8212, 556, 130, 4010, 1345, 8029, 1308, - 4503, 4807, 9662, 3462, 2757, 5791, 6927, 5781, 1828, - 6619, 3326, 7024, 3608, 2458, 7979, 7543, 2790, 1612, - 1805, 1787, 8794, 9741, 7993, 3704, 9039, 9254, 2600, - 334, 5416, 7480, 7628, 7663, 6884, 1345, 3500, 2501, - 4789, 8659, 1856, 2232, 3695, 5714, 1114, 1603, 258, - 5532, 1628, 1818, 2160, 1905, 3356, 8059, 3136, 1778, - 1248, 8424, 5567, 2850, 9771, 3211, 6986, 8722, 213, - 9766, 882, 3941, 450, 1389, 8247, 3205, 1238, 3324, - 5793, 69, 6088, 7364, 3886, 8969, 1412, 953, 499, - 6629, 2218, 4915, 6257, 2383, 522, 8716, 8699, 1620, - 7817, 8689, 3567, 9254, 3061, 231, 5744, 426, 6833, - 6764, 4302, 4591, 1217, 4510, 1345, 3201, 3722, 7122, - 1855, 281, 8632, 2023, 8209, 5256, 6947, 1447, 2643, - 8259, 1471, 788, 6604, 601, 2972, 8124, 3977, 4028, - 9614, 6822, 2923, 9821, 3255, 1326, 3441, 6852, 8830, - 9806, 9512, 6229, 216, 6212, 5442, 9243, 5741, 84, - 1873, 4975, 3463, 7615, 3711, 3175, 4868, 6387, 4767, - 8951, 3595, 5399, 8876, 2732, 3369, 7303, 58, 5594, - 5541, 8587, 39, 1065, 5345, 9359, 9831, 757, 7524, - 9889, 5863, 3121, 3966, 7854, 264, 3327, 5017, 1001, - 1158, 6014, 3699, 956, 5178, 210, 3051, 8717, 5009, - 7959]), - values=tensor([-1.1267e+00, 9.9958e-01, -3.9930e-01, 6.4552e-01, - 1.1757e+00, 4.0321e-01, -1.4581e-01, -4.8418e-01, - 1.1093e+00, -1.4493e+00, -2.0397e-01, 1.3948e-01, - -7.2084e-01, -1.4625e+00, 6.3078e-01, 1.4585e+00, - -7.6630e-01, -5.0233e-01, -6.9056e-01, 1.2087e-01, - -1.5290e+00, -3.5878e-01, -1.9362e+00, 5.8830e-01, - -7.9054e-02, 5.9895e-01, -5.1073e-02, 3.6354e-01, - -4.4211e-01, 4.4232e-01, 6.2773e-01, -2.9480e-02, - -1.2139e-01, -9.5736e-01, 1.3368e+00, 1.1286e+00, - -3.2338e+00, -1.7408e+00, -4.8078e-01, -3.5725e-01, - -2.0872e+00, -2.4299e-01, -9.9385e-01, -1.1473e+00, - 1.4434e+00, -1.3116e+00, 1.6035e+00, 5.0371e-01, - -4.2531e-01, 8.1825e-01, -8.5244e-02, 1.3817e+00, - 2.1899e-02, 6.3117e-01, 3.1498e-01, -4.6574e-02, - -1.3576e+00, -3.4294e-01, -9.6518e-01, 2.8513e+00, - -1.7599e+00, 1.3326e+00, -4.4524e-01, 5.1345e-01, - 
3.9285e-01, 6.4504e-01, 1.2254e-01, -2.1038e+00, - -2.8071e-01, 1.3569e+00, 4.2690e-01, 3.7668e-01, - 1.6267e+00, -5.4795e-01, 2.0884e-02, -7.1271e-01, - -5.1393e-01, -4.0461e-01, -1.4888e+00, 1.3559e+00, - -8.7715e-01, 5.0365e-01, 5.6218e-01, -1.0504e+00, - 5.4016e-01, 4.3472e-02, 1.1200e+00, -7.1064e-01, - 4.2861e-01, 9.7914e-01, 1.1025e+00, -5.3951e-01, - -1.5139e-01, 7.6996e-01, 1.5436e+00, 3.3483e-01, - -5.9882e-01, -8.5978e-02, -1.8589e+00, -1.4164e+00, - -5.5418e-01, 7.1993e-03, -3.8422e-01, -1.0387e+00, - 1.3088e+00, 1.0944e+00, 5.6801e-01, 1.9888e-01, - -1.2131e+00, 3.0093e-01, 3.5923e-01, -1.1487e-01, - -9.3524e-01, -7.1549e-01, -4.0647e-01, -6.0793e-01, - -9.8182e-01, -3.2825e-01, -1.0166e+00, 5.7350e-01, - -1.3696e+00, 2.8665e-01, 1.7033e+00, -1.0412e+00, - -9.6257e-02, 1.6325e+00, -4.2908e-02, -2.2275e+00, - 1.3168e+00, 1.5920e+00, 2.7449e-01, 2.9711e-01, - -8.7433e-01, 2.0635e-01, 8.7190e-01, -7.0198e-01, - -5.3026e-01, -2.2125e-01, -1.4890e-01, -1.6937e+00, - 2.5218e+00, 3.4151e-01, 1.7715e+00, 9.9308e-01, - -6.1605e-01, 3.4861e-01, -8.6710e-01, 1.1228e+00, - -1.5127e-01, 4.6625e-01, 7.6783e-01, -7.8146e-01, - 2.7244e-01, 7.9931e-01, 7.7499e-01, -3.8151e-02, - 1.6044e-01, -7.6815e-01, 7.4150e-01, -2.0216e+00, - 3.4279e-01, -6.9332e-01, -1.1046e+00, 6.3305e-01, - 9.4908e-01, 7.8525e-01, -3.6906e-01, -8.2064e-01, - 1.1086e+00, 2.5720e-01, 2.0831e-01, -4.0543e-01, - -9.8334e-02, 2.6264e-01, 4.1938e-01, -1.6166e-02, - 3.6583e-02, -5.9790e-01, 1.8471e+00, -6.0387e-01, - 8.7327e-01, -1.2842e+00, 2.8938e-01, 4.5303e-01, - -1.4272e+00, -5.9600e-01, -4.2216e-02, 4.8783e-01, - 9.0039e-01, 2.5473e-01, 8.4128e-02, 2.1929e-01, - -4.8285e-01, -1.3081e-01, 1.9005e-01, -2.5354e-01, - 6.7135e-01, -2.2116e+00, -1.3787e+00, -6.9996e-01, - 1.2621e-01, -2.6132e-01, 1.0058e+00, -6.1614e-01, - -1.0139e+00, -6.4972e-01, -1.2616e-01, 6.7127e-01, - -4.5025e-01, 1.1922e+00, -1.2968e+00, 3.5343e-01, - -1.3843e-01, 2.0874e+00, 6.1470e-01, -3.8680e-01, - -2.8126e-01, -4.0711e-01, 2.3860e+00, -6.5034e-02, - 4.4629e-02, -8.5664e-01, 7.3610e-01, -1.4055e-01, - 1.8281e-02, -1.4813e-01, 1.9067e+00, -2.3178e-01, - -9.5242e-03, 5.0058e-01, 6.8275e-01, -1.6442e+00, - 7.9270e-01, 1.6876e-01, 4.9035e-01, -7.3098e-01, - 1.6857e+00, -1.5385e-02, -1.2309e+00, 2.7868e-02, - -2.5895e-01, -1.3541e+00, -3.1320e-01, 1.3052e+00, - 1.8746e+00, -2.2703e+00, 7.6529e-01, 2.1938e+00, - -8.0849e-01, -2.0685e-01, -1.5279e+00, 2.0311e-01, - 9.1010e-01, 6.5864e-01, -2.4717e-01, 9.3591e-03, - -1.8092e-01, -2.5631e-01, -7.7386e-01, -7.7014e-01, - 1.3536e+00, 1.1345e-01, 8.8867e-01, -6.2931e-01, - -1.0718e+00, 2.5273e-01, -3.6488e-01, 7.1753e-01, - -6.5983e-01, 1.2105e+00, -1.2745e+00, -8.8942e-02, - -1.4474e+00, -1.1304e-01, 2.0534e-01, -1.2666e+00, - -3.8933e-01, 1.6688e+00, 1.7180e-01, -1.7145e+00, - -1.0401e+00, 3.9994e-01, -6.3626e-01, 1.5008e+00, - -6.9937e-01, -2.1368e-03, -3.9993e-01, -3.4445e-01, - -6.1780e-01, 9.9404e-01, 9.7140e-01, 1.2518e+00, - 7.1295e-01, -6.4379e-01, -1.4742e+00, -1.1242e-01, - -2.4549e-01, 5.7863e-01, -1.9438e-01, 8.6387e-01, - -2.9757e-01, 2.0117e+00, -1.4603e+00, -8.9796e-01, - 5.4261e-01, 3.5468e-01, -2.6611e-01, 8.0542e-01, - -1.6641e+00, -1.2072e+00, -4.0468e-01, -2.2144e-01, - -1.3792e+00, -1.4653e+00, 6.7146e-02, -4.4393e-01, - 7.3877e-01, -6.2024e-03, -1.0859e+00, 1.0258e+00, - 1.3175e+00, 1.2756e+00, -1.1664e+00, 6.0869e-01, - 1.0807e+00, -1.2743e+00, 1.7831e-01, -2.1641e+00, - -1.1412e-01, -1.8029e+00, -7.9972e-02, -3.1443e-01, - 1.3506e+00, 2.1213e+00, -1.9520e+00, -1.9620e+00, - 2.2530e-01, 
4.3830e-01, 8.6959e-01, 3.5598e-01, - 4.7856e-01, -7.5741e-01, 2.3955e-01, -7.1372e-01, - -4.5864e-01, -8.5716e-01, 1.1229e+00, 3.6050e-01, - -3.4282e-01, 2.8908e-01, 1.9974e+00, -1.2157e-01, - -9.2857e-01, 3.6541e-01, -2.1761e-01, 6.4334e-02, - -3.4179e-01, -4.6368e-02, -1.7049e+00, -6.1027e-01, - -2.0144e-01, 3.0683e-01, 2.1303e-01, 3.6512e-01, - -8.4555e-01, -7.0046e-01, -1.0592e+00, 3.5977e-01, - -1.7323e+00, -7.4612e-01, 1.0045e+00, 5.3813e-02, - -1.9158e-02, 3.2793e-01, 1.5522e-01, -1.9235e+00, - -3.8306e-01, 9.6631e-01, 1.0257e+00, -6.3863e-01, - -3.0958e-01, -2.9434e-01, -1.1155e-01, 4.2824e-01, - -2.0360e+00, 5.5342e-02, 2.5492e+00, 1.7885e+00, - 4.0732e-01, -5.3381e-03, 8.9006e-01, 1.9158e+00, - 3.1895e-01, 5.2801e-01, -4.7398e-01, 6.1251e-01, - -3.5335e-01, 1.4366e+00, -8.9446e-01, 7.4810e-01, - -9.2632e-02, 1.2390e-01, -7.3278e-01, 2.4728e-01, - 5.3944e-01, -1.9828e+00, 1.2280e+00, -5.2395e-01, - 6.7825e-01, -7.5624e-01, 2.3092e-01, -2.3055e-01, - 5.7026e-01, 9.3626e-02, 6.1179e-01, -1.0464e-01, - -7.4954e-01, -8.1160e-01, 7.2279e-01, 1.0875e+00, - 1.4650e-01, -3.6989e-01, 1.4935e+00, -5.1846e-01, - -1.9360e+00, -3.1383e-02, 1.2420e+00, -2.4575e-02, - 8.0621e-01, 5.4210e-01, -5.3231e-01, 1.4048e+00, - 1.3481e+00, 1.0432e+00, 3.6835e-01, -1.2238e+00, - -4.4686e-02, -1.1742e+00, -7.7776e-03, 1.5963e-01, - 5.3408e-01, -1.3289e+00, -8.0803e-01, -7.9219e-01, - -4.8870e-01, -2.6218e+00, 1.6701e+00, 4.2614e-05, - 4.5417e-01, -3.1126e-01, 1.6318e-01, -5.1281e-01, - 2.7290e-01, 2.3852e-01, 2.8414e-01, 5.7907e-02, - 1.2328e+00, 8.9690e-01, 6.5984e-01, -1.3531e+00, - 1.3553e+00, -6.7554e-01, 2.8924e+00, -3.8556e-01, - 9.6318e-01, -1.3858e+00, -3.9751e-01, -9.8241e-02, - 1.9035e-01, 2.2944e-01, -7.5765e-01, 5.6757e-01, - 1.0232e+00, -3.4510e-01, -9.3572e-01, -1.0157e+00, - -1.4187e+00, 1.2851e-01, -2.2670e-01, 4.6081e-01, - 1.1325e+00, 1.3678e+00, 5.7692e-01, -2.3623e+00, - 1.4895e+00, 2.0900e-01, 2.4488e+00, 5.2715e-02, - -1.5091e+00, -6.1269e-01, 2.0397e+00, -1.6557e+00, - 9.3604e-01, 3.8774e-01, -1.1719e+00, 1.7493e+00, - -9.0466e-01, -8.8712e-01, -1.1755e+00, 4.1774e-01, - 2.0647e+00, 3.6203e-01, 7.1944e-02, -1.6484e+00, - -1.3147e+00, 1.2646e+00, -1.9325e-01, -1.3378e+00, - -1.2077e+00, 7.2163e-01, -1.0747e+00, 8.2830e-01, - 1.1765e+00, -1.1117e+00, -1.1088e+00, 1.2445e+00, - 1.2034e+00, 1.9564e+00, 4.1252e-01, -2.7859e-01, - 7.4551e-01, 5.6915e-01, 8.5772e-01, -4.2962e-03, - -1.1617e+00, 3.2091e-01, 6.1092e-01, 5.6982e-01, - -8.1123e-01, -1.3074e+00, -3.6988e-02, 1.0890e+00, - -3.3022e-01, 5.7173e-01, 1.2934e+00, 6.4620e-01, - 8.0755e-02, 2.2641e-01, 2.9551e-01, 1.0243e+00, - -2.5140e-01, -1.1377e-01, -1.5102e+00, -7.2232e-01, - 3.9729e-01, 3.0677e-01, 4.3487e-01, 2.4759e+00, - 4.8101e-01, -2.0766e-01, 1.0500e+00, 7.4032e-01, - 6.9707e-02, 2.2844e+00, -1.3175e+00, 1.2178e+00, - 4.0456e-01, -2.4854e-01, -1.5566e+00, -1.6633e-01, - -3.2689e-01, -2.1630e+00, 2.1144e-01, -2.1187e-01, - 1.0553e+00, -4.7398e-01, 5.5106e-01, 6.6889e-01, - 3.6349e-01, -3.2650e-01, 3.6386e-01, -1.0836e-04, - -6.3491e-01, 7.2262e-01, -5.0944e-01, -4.2257e-01, - 2.8635e-02, 4.9464e-01, 1.3431e-01, -4.3234e-01, - 8.7360e-01, -1.6526e+00, -1.6500e+00, -4.5130e-01, - -9.5255e-01, -1.4026e-01, -9.5895e-01, 9.4986e-01, - -3.4725e-01, -2.6373e-01, -5.6352e-01, 9.8595e-01, - -7.8576e-01, -8.5672e-01, -8.4422e-01, 1.2291e+00, - -3.7749e-01, -8.3716e-01, -8.0912e-01, 2.2822e+00, - 4.6359e-01, 1.4793e+00, -6.8537e-01, -1.5239e-01, - -7.1455e-01, 3.7699e-01, 7.5125e-01, -2.1301e+00, - 1.1240e+00, 8.0201e-01, -2.3065e+00, 
1.1086e+00, - 1.7327e-01, -2.4006e+00, 5.8133e-01, 1.5551e+00, - -1.0745e+00, -1.0167e+00, 4.8847e-01, -1.4299e+00, - -6.1642e-01, -9.1207e-02, -2.1746e+00, -8.1222e-01, - 3.2331e-01, 2.6657e-01, -6.5705e-02, -1.1855e+00, - 1.6791e+00, -1.1067e+00, -2.1613e-01, -5.4110e-01, - -1.4790e+00, 3.8794e-01, 1.3764e+00, 1.3263e+00, - -2.3916e-01, 2.2738e-01, 3.6634e-01, -8.9084e-01, - -1.2323e-01, -1.3621e+00, -2.8386e-01, -1.4290e+00, - -7.9432e-01, -5.5912e-01, -1.1564e+00, 4.3015e-01, - -1.4916e+00, 1.1167e+00, 1.1563e-01, -5.0441e-01, - -2.9505e-01, 8.4223e-01, 1.0062e+00, 1.4742e+00, - -3.6713e-01, 3.1320e-01, -1.0665e+00, 5.8598e-01, - 1.3278e+00, -9.7937e-01, 2.7199e-01, -2.5446e-01, - 4.9309e-01, -1.2864e+00, 3.1660e-01, 2.4644e+00, - -1.5332e+00, -5.0134e-01, 5.7625e-02, -5.5293e-01, - 3.1004e-01, 5.4177e-01, -2.7351e-01, 4.2005e-01, - 2.4091e-01, -1.2441e+00, -1.0709e+00, 1.7822e+00, - 8.1190e-01, 6.6988e-01, 1.3869e+00, -1.0551e+00, - -3.9592e-01, -3.9359e-01, 3.1190e-01, -2.0555e+00, - 1.0543e+00, 4.8794e-01, -2.5062e-01, 1.1599e+00, - -7.8921e-01, -7.3898e-01, -2.2051e-01, 1.0343e+00, - 9.9336e-01, 1.4121e+00, -1.0680e+00, 1.2419e+00, - 4.9369e-02, 1.2102e+00, 1.1111e+00, 1.1182e+00, - -3.4679e-01, -7.5134e-01, 5.8000e-01, -8.7335e-01, - -8.6145e-01, -6.6027e-01, 9.8669e-01, 1.3038e+00, - -2.0355e+00, 7.6677e-01, -7.3597e-01, -2.1282e-01, - 1.7098e-01, 6.5113e-01, -3.7585e-01, 4.3648e-01, - -3.3096e-01, 1.0013e+00, 3.0245e-01, 8.3589e-02, - 1.6531e+00, -3.9269e-01, 2.2536e+00, -1.6756e+00, - -6.2681e-01, -1.1492e+00, 9.5103e-01, -1.8672e+00, - -4.8700e-01, -1.1202e+00, 4.3629e-01, -1.8129e+00, - 1.5488e+00, -8.6519e-01, -1.1055e-01, -1.2084e+00, - 1.0274e+00, 1.1153e+00, -8.6412e-01, -1.4593e+00, - 1.0950e+00, -1.2805e+00, -5.4514e-01, 9.0279e-02, - 7.6631e-02, 8.9327e-01, 1.6325e+00, 5.4458e-01, - -4.4955e-01, -6.7540e-01, -2.0631e-01, 5.2594e-01, - 6.3364e-02, 7.5805e-01, 1.5051e+00, 3.1998e-01, - -1.3501e+00, 2.2329e-02, -8.5761e-01, 9.1674e-02, - -9.0289e-02, -2.3372e+00, 1.9521e-01, -7.4988e-01, - 2.1691e-01, 6.7169e-01, 4.7404e-01, 1.3214e+00, - -7.4827e-01, -9.5198e-01, 6.8075e-01, 1.1011e+00, - 1.4847e+00, 3.0317e-01, 1.8905e+00, 2.0706e+00, - -4.2235e-02, -3.1811e-01, 6.3626e-01, -1.4095e-01, - 4.0645e-01, 4.7106e-01, 8.5588e-01, 4.1556e-01, - -9.0174e-01, -6.9849e-01, -2.8260e-01, 1.3868e+00, - -8.9030e-01, -7.8324e-01, 8.3402e-01, 1.0210e+00, - 2.3830e-01, 2.1314e+00, 1.4930e-01, -1.5145e-01, - 3.0541e-01, -1.2231e+00, 8.0039e-02, -1.7829e-01, - 1.0454e+00, 2.3048e-01, 8.0772e-02, -7.5490e-01, - 7.6959e-01, 2.9459e-01, 1.9243e+00, 1.9807e-01, - -2.3998e+00, -4.6317e-01, 3.9033e-01, 2.1506e-02, - 1.0376e-01, -1.7278e+00, -1.2808e+00, 1.1586e+00, - -1.0938e+00, 1.1228e+00, 1.0289e+00, 1.4500e+00, - -3.3856e-01, 2.3863e-02, -9.6556e-02, 2.7553e-01, - 4.9822e-01, 1.3587e+00, 6.3876e-01, -7.3612e-01, - -1.2484e+00, 4.9202e-01, -8.7324e-01, 4.7128e-01, - -1.6325e-01, 8.4262e-01, 6.2810e-01, 9.1072e-02, - -6.8368e-01, 5.1082e-01, 4.9404e-02, 4.8629e-01, - -2.4629e+00, 2.7725e-01, -5.0478e-02, 1.0547e-01, - -1.3535e+00, 1.3360e+00, 2.5663e-01, -8.4745e-01, - 4.0864e-01, 9.3690e-01, 9.2998e-01, 6.0752e-01, - 1.6514e+00, -1.4550e+00, -8.5073e-01, -3.3732e-01, - -1.1255e+00, -5.8642e-01, -1.5153e+00, 2.7172e-01, - 1.6280e-01, 3.5330e-02, -4.0081e-01, 4.1629e-01, - -6.0387e-01, -1.5833e-01, -1.0447e-01, -6.7319e-01, - 1.0655e+00, -5.2951e-01, 3.5344e-01, -5.1418e-01, - 2.9477e-01, -2.3069e+00, 2.9380e-01, -9.6063e-02, - -3.7041e-01, -1.1153e+00, 7.8732e-01, 9.1250e-01, - -1.2550e+00, 
-1.2419e+00, 8.9287e-01, -1.1626e+00, - 1.8944e+00, -9.3486e-01, 1.7889e+00, 1.4593e-02, - 1.1929e+00, 4.6717e-01, 2.5026e-01, -7.1843e-01, - 6.7783e-01, -7.1813e-01, 4.0677e-01, -3.8997e-01, - -4.0408e-01, -7.8754e-01, 1.1204e+00, -1.0734e+00, - 1.3668e+00, -9.9504e-01, 4.1913e-01, -3.3019e-01, - -1.2084e+00, 6.7457e-01, -5.9583e-02, -9.1102e-01, - 1.1890e+00, 4.1728e-01, -1.4197e+00, 1.7318e-01, - -8.6549e-01, -9.2566e-03, 4.5611e-01, -3.1321e-01, - -7.0098e-01, -1.2817e+00, 2.3514e-01, -6.9337e-01, - -1.8220e-01, -1.3402e+00, 2.5257e-01, -6.5032e-02, - 7.9468e-02, 2.0058e+00, -8.6056e-01, -2.4988e+00, - 2.0434e-03, -6.4709e-01, -2.6952e-01, 2.5683e+00, - -5.1225e-01, -1.4214e+00, -2.7738e-01, -2.4940e-01, - -6.3407e-01, 5.6016e-01, -1.1201e+00, 1.1526e-01, - 3.9586e-01, -1.2663e+00, 1.2897e-01, -1.2513e+00, - -6.6935e-01, -1.8699e-01, 1.1745e+00, 5.7926e-02, - -4.8682e-01, -5.8512e-02, 3.8640e-01, 2.4479e+00, - -7.7442e-01, -6.6656e-01, -9.3995e-01, 8.7680e-01, - 1.0151e-01, -4.5578e-01, -1.0889e+00, 2.7776e-01, - 4.8723e-01, -1.3868e+00, 1.3061e-01, 9.1692e-01, - 1.2626e+00, -9.0738e-01, -7.4655e-01, 1.3039e+00, - -1.0467e+00, 1.0425e+00, -3.0889e-01, -7.0529e-01, - -9.2769e-01, 5.0140e-01, -7.9478e-01, -5.1331e-01, - 8.8456e-01, 8.2267e-01, 6.2436e-01, -1.2896e+00, - 3.1233e-02, 1.5354e+00, -1.0748e+00, -3.1208e-01, - -4.3868e-02, -5.7822e-01, -1.7090e+00, 2.4373e-01, - -3.4570e-01, 2.1444e+00, 2.3934e-02, -6.5855e-01, - 9.3654e-01, 2.1205e+00, 2.8175e-01, -1.9388e+00]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7786, 0.1109, 0.6260, ..., 0.7374, 0.7261, 0.0829]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.511717319488525 seconds - -[20.56, 20.36, 20.2, 20.2, 20.16, 20.16, 20.16, 20.16, 20.12, 20.32] -[20.4, 20.64, 20.64, 22.84, 24.64, 25.92, 26.88, 27.0, 24.84, 23.96, 24.0, 23.64, 23.48] -10.642189741134644 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 424922, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.511717319488525, 'TIME_S_1KI': 0.024737992665685764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 302.01861512184144, 'W': 28.37936763657448} -[20.56, 20.36, 20.2, 20.2, 20.16, 20.16, 20.16, 20.16, 20.12, 20.32, 20.2, 20.16, 20.28, 20.44, 20.88, 20.92, 20.84, 20.84, 20.64, 20.24] -367.18 -18.359 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 424922, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.511717319488525, 'TIME_S_1KI': 0.024737992665685764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 302.01861512184144, 'W': 28.37936763657448, 'J_1KI': 0.7107624814009194, 'W_1KI': 0.0667872400971813, 'W_D': 10.020367636574477, 'J_D': 106.6386536643505, 'W_D_1KI': 0.023581663544308077, 'J_D_1KI': 5.549645239434079e-05} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_2e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_10000_2e-05.json deleted file mode 100644 index 82084f3..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 362139, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.370163679122925, "TIME_S_1KI": 0.028635865452555302, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 306.58433523178104, "W": 28.67356603561499, "J_1KI": 0.8465929801313337, "W_1KI": 0.07917834322073841, "W_D": 10.23556603561499, "J_D": 109.4410163302422, "W_D_1KI": 0.0282641914723766, "J_D_1KI": 7.804790832353488e-05} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_2e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_10000_2e-05.output deleted file mode 100644 index b13a048..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_2e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 0.032781124114990234} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([6871, 3733, 5965, ..., 5141, 3011, 301]), - values=tensor([-0.4304, -1.2708, -0.2632, ..., 0.7868, 3.1604, - 1.2152]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.4678, 0.6370, 0.2747, ..., 0.6866, 0.0324, 0.0834]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 0.032781124114990234 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 320306 -ss 10000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 9.287068843841553} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1999, 1999, 2000]), - col_indices=tensor([3694, 6091, 5443, ..., 5395, 436, 2041]), - values=tensor([-0.9064, 1.9159, -0.4201, ..., -1.3373, 0.3655, - -0.2885]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.4015, 0.1742, 0.4171, ..., 0.9425, 0.5446, 0.5222]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 9.287068843841553 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 362139 -ss 10000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.370163679122925} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([7968, 3420, 6634, ..., 1670, 1798, 8896]), - values=tensor([ 0.3730, 1.3738, -1.4562, ..., -0.4679, -0.5220, - -3.1368]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.3918, 0.4219, 0.1314, ..., 0.5461, 0.2473, 0.0750]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.370163679122925 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([7968, 3420, 6634, ..., 1670, 1798, 8896]), - values=tensor([ 0.3730, 1.3738, -1.4562, ..., -0.4679, -0.5220, - -3.1368]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.3918, 0.4219, 0.1314, ..., 0.5461, 0.2473, 0.0750]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.370163679122925 seconds - -[20.08, 20.28, 20.48, 20.6, 20.52, 20.52, 20.52, 20.48, 20.56, 20.56] -[20.68, 20.6, 20.64, 24.08, 25.96, 27.16, 27.88, 28.12, 24.28, 23.44, 23.32, 23.2, 23.12] -10.69222903251648 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 362139, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 2000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.370163679122925, 'TIME_S_1KI': 0.028635865452555302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 306.58433523178104, 'W': 28.67356603561499} -[20.08, 20.28, 20.48, 20.6, 20.52, 20.52, 20.52, 20.48, 20.56, 20.56, 20.12, 20.04, 20.32, 20.64, 20.56, 20.68, 20.68, 20.6, 20.64, 20.52] -368.76 -18.438 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 362139, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 2000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.370163679122925, 'TIME_S_1KI': 0.028635865452555302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 306.58433523178104, 'W': 28.67356603561499, 'J_1KI': 0.8465929801313337, 'W_1KI': 0.07917834322073841, 'W_D': 10.23556603561499, 'J_D': 109.4410163302422, 'W_D_1KI': 0.0282641914723766, 'J_D_1KI': 7.804790832353488e-05} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_5e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_10000_5e-05.json deleted file mode 100644 index ae56efa..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 236282, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.53740906715393, "TIME_S_1KI": 0.04459674908437346, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 324.57128691673273, "W": 29.735476961651, "J_1KI": 1.3736606551355275, "W_1KI": 0.1258474067497778, "W_D": 11.572476961650999, "J_D": 126.31691582083694, "W_D_1KI": 0.0489773954920434, "J_D_1KI": 0.00020728365043483382} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_5e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_10000_5e-05.output deleted file mode 100644 index a4d24b8..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_5e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.04942727088928223} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 5000, 5000, 5000]), - col_indices=tensor([1168, 6226, 690, ..., 5217, 476, 6738]), - values=tensor([ 0.0787, 0.3273, -0.2779, ..., 1.4194, 0.8103, - 1.1550]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.0064, 0.2511, 0.1525, ..., 0.6286, 0.9165, 0.4351]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 0.04942727088928223 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 212433 -ss 10000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.440152168273926} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 4999, 5000]), - col_indices=tensor([9942, 1214, 2847, ..., 8254, 7960, 457]), - values=tensor([-0.9693, 1.4816, 0.2851, ..., 2.1213, -0.2351, - 0.2580]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.7203, 0.5491, 0.5318, ..., 0.5029, 0.8608, 0.3592]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 9.440152168273926 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 236282 -ss 10000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.53740906715393} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 5000, 5000, 5000]), - col_indices=tensor([5447, 7579, 9073, ..., 5210, 7678, 9855]), - values=tensor([-2.4915, -1.5336, 2.5123, ..., -0.4713, 0.8329, - -0.6699]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.8488, 0.5259, 0.1601, ..., 0.9858, 0.5655, 0.9639]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.53740906715393 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 5000, 5000, 5000]), - col_indices=tensor([5447, 7579, 9073, ..., 5210, 7678, 9855]), - values=tensor([-2.4915, -1.5336, 2.5123, ..., -0.4713, 0.8329, - -0.6699]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.8488, 0.5259, 0.1601, ..., 0.9858, 0.5655, 0.9639]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.53740906715393 seconds - -[20.44, 20.28, 20.44, 20.48, 20.6, 20.4, 20.24, 20.2, 19.8, 19.92] -[19.92, 20.2, 21.36, 22.96, 22.96, 24.68, 25.4, 25.96, 24.48, 23.6, 23.48, 23.36, 23.28, 23.24] -10.915287733078003 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 236282, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.53740906715393, 'TIME_S_1KI': 0.04459674908437346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.57128691673273, 'W': 29.735476961651} -[20.44, 20.28, 20.44, 20.48, 20.6, 20.4, 20.24, 20.2, 19.8, 19.92, 20.2, 20.0, 19.96, 20.04, 20.28, 20.16, 20.08, 20.0, 20.0, 20.04] -363.26 -18.163 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 236282, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.53740906715393, 'TIME_S_1KI': 0.04459674908437346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.57128691673273, 'W': 29.735476961651, 'J_1KI': 1.3736606551355275, 'W_1KI': 0.1258474067497778, 'W_D': 11.572476961650999, 'J_D': 126.31691582083694, 'W_D_1KI': 0.0489773954920434, 'J_D_1KI': 0.00020728365043483382} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_8e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_10000_8e-05.json deleted file mode 100644 index 3b4f9a7..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 185363, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 10.317452430725098, "TIME_S_1KI": 0.055660797628033096, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 282.4831217098236, "W": 27.119152879595266, "J_1KI": 1.5239455647018207, 
"W_1KI": 0.1463029454615822, "W_D": 8.652152879595263, "J_D": 90.12402289223665, "W_D_1KI": 0.046676806480232105, "J_D_1KI": 0.0002518129641850429} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_10000_8e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_10000_8e-05.output deleted file mode 100644 index e5a816f..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_10000_8e-05.output +++ /dev/null @@ -1,84 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 0.060555219650268555} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 7999, 8000, 8000]), - col_indices=tensor([9977, 5306, 1222, ..., 6555, 7712, 2915]), - values=tensor([0.7927, 2.3954, 0.9167, ..., 1.0032, 0.5486, 0.7967]), - size=(10000, 10000), nnz=8000, layout=torch.sparse_csr) -tensor([0.2944, 0.3792, 0.7257, ..., 0.8753, 0.2073, 0.0871]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 0.060555219650268555 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 173395 -ss 10000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 9.82205057144165} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 3, ..., 7999, 7999, 8000]), - col_indices=tensor([5642, 5987, 9672, ..., 9618, 963, 3909]), - values=tensor([ 2.0260, 0.0167, -1.0249, ..., -0.9431, 1.2350, - -0.3906]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.6617, 0.5997, 0.7114, ..., 0.4730, 0.1362, 0.1168]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 9.82205057144165 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 185363 -ss 10000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 10.317452430725098} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 7999, 7999, 8000]), - col_indices=tensor([8903, 3321, 7408, ..., 5922, 9897, 4802]), - values=tensor([ 0.5381, -0.2046, 1.4195, ..., -1.2433, 1.3727, - 0.7226]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.3619, 0.5493, 0.6101, ..., 0.1612, 0.4763, 0.2515]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 10.317452430725098 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 7999, 7999, 8000]), - col_indices=tensor([8903, 3321, 7408, ..., 5922, 9897, 4802]), - values=tensor([ 0.5381, -0.2046, 1.4195, ..., -1.2433, 1.3727, - 0.7226]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.3619, 0.5493, 0.6101, ..., 0.1612, 0.4763, 0.2515]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 10.317452430725098 seconds - -[20.48, 20.32, 20.48, 20.4, 20.4, 20.48, 20.36, 20.36, 20.2, 20.12] -[19.8, 19.92, 20.64, 20.64, 22.16, 23.8, 24.56, 24.84, 24.4, 24.04, 23.36, 23.0, 23.16] -10.416369676589966 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 185363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 8e-05, 'TIME_S': 10.317452430725098, 'TIME_S_1KI': 0.055660797628033096, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 282.4831217098236, 'W': 27.119152879595266} -[20.48, 20.32, 20.48, 20.4, 20.4, 20.48, 20.36, 20.36, 20.2, 20.12, 20.36, 20.48, 20.72, 20.76, 20.72, 20.64, 20.68, 20.68, 20.68, 21.0] -369.34000000000003 -18.467000000000002 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 185363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 8e-05, 'TIME_S': 10.317452430725098, 'TIME_S_1KI': 0.055660797628033096, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 282.4831217098236, 'W': 27.119152879595266, 'J_1KI': 1.5239455647018207, 'W_1KI': 0.1463029454615822, 'W_D': 8.652152879595263, 'J_D': 90.12402289223665, 'W_D_1KI': 0.046676806480232105, 'J_D_1KI': 0.0002518129641850429} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_0.0001.json b/pytorch/output_1core_after_test/altra_10_10_10_150000_0.0001.json deleted file mode 100644 index 212c1d3..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1834, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249895, "MATRIX_DENSITY": 9.999533333333333e-05, "TIME_S": 10.790888786315918, "TIME_S_1KI": 5.88379977443616, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 367.17931696891776, "W": 32.98158336912441, "J_1KI": 200.2068249557894, "W_1KI": 17.983415141289207, "W_D": 14.585583369124407, "J_D": 162.37924295902246, "W_D_1KI": 7.952880790144169, "J_D_1KI": 4.336358118944476} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_0.0001.output b/pytorch/output_1core_after_test/altra_10_10_10_150000_0.0001.output deleted file mode 100644 index 839422b..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_0.0001.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 150000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249891, 
"MATRIX_DENSITY": 9.999515555555556e-05, "TIME_S": 5.724584579467773} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 33, ..., 2249859, - 2249877, 2249891]), - col_indices=tensor([ 15294, 19172, 20091, ..., 131171, 142636, - 143029]), - values=tensor([ 1.3380, -0.3137, -0.5749, ..., 0.3744, 0.3646, - -2.0781]), size=(150000, 150000), nnz=2249891, - layout=torch.sparse_csr) -tensor([0.3273, 0.5078, 0.8205, ..., 0.0347, 0.2426, 0.7008]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249891 -Density: 9.999515555555556e-05 -Time: 5.724584579467773 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1834 -ss 150000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249895, "MATRIX_DENSITY": 9.999533333333333e-05, "TIME_S": 10.790888786315918} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 20, 31, ..., 2249870, - 2249883, 2249895]), - col_indices=tensor([ 7356, 13304, 13563, ..., 98372, 126446, - 139883]), - values=tensor([-1.7231, -0.1071, 2.0159, ..., -1.1470, -0.3672, - 1.5446]), size=(150000, 150000), nnz=2249895, - layout=torch.sparse_csr) -tensor([0.4200, 0.0608, 0.0953, ..., 0.0975, 0.4627, 0.0936]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249895 -Density: 9.999533333333333e-05 -Time: 10.790888786315918 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 20, 31, ..., 2249870, - 2249883, 2249895]), - col_indices=tensor([ 7356, 13304, 13563, ..., 98372, 126446, - 139883]), - values=tensor([-1.7231, -0.1071, 2.0159, ..., -1.1470, -0.3672, - 1.5446]), size=(150000, 150000), nnz=2249895, - layout=torch.sparse_csr) -tensor([0.4200, 0.0608, 0.0953, ..., 0.0975, 0.4627, 0.0936]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249895 -Density: 9.999533333333333e-05 -Time: 10.790888786315918 seconds - -[19.96, 19.88, 20.12, 20.16, 20.32, 20.32, 20.32, 20.52, 20.6, 20.48] -[20.52, 20.44, 21.04, 22.2, 24.32, 26.88, 29.28, 31.0, 32.16, 32.32, 32.32, 32.24, 32.24, 32.36] -11.132858991622925 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1834, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 2249895, 'MATRIX_DENSITY': 9.999533333333333e-05, 'TIME_S': 10.790888786315918, 'TIME_S_1KI': 5.88379977443616, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 367.17931696891776, 'W': 32.98158336912441} -[19.96, 19.88, 20.12, 20.16, 20.32, 20.32, 20.32, 20.52, 20.6, 20.48, 20.56, 20.68, 20.56, 20.76, 20.92, 20.72, 20.64, 20.48, 20.32, 20.2] -367.92 -18.396 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1834, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 2249895, 'MATRIX_DENSITY': 9.999533333333333e-05, 'TIME_S': 10.790888786315918, 'TIME_S_1KI': 5.88379977443616, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 367.17931696891776, 'W': 32.98158336912441, 'J_1KI': 200.2068249557894, 'W_1KI': 17.983415141289207, 'W_D': 14.585583369124407, 'J_D': 162.37924295902246, 'W_D_1KI': 7.952880790144169, 'J_D_1KI': 4.336358118944476} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_1e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_150000_1e-05.json deleted file mode 100644 index 230714c..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 7234, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 225000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.380627632141113, "TIME_S_1KI": 1.4349775548992416, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 386.428590927124, "W": 35.67289032159632, "J_1KI": 53.41838414806801, "W_1KI": 4.931281493170627, "W_D": 17.146890321596317, "J_D": 185.74465388202657, "W_D_1KI": 2.3703193698640193, "J_D_1KI": 0.32766372267957133} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_1e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_150000_1e-05.output deleted file mode 100644 index d6ae063..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_1e-05.output +++ /dev/null @@ -1,69 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 150000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 
22500000000, "MATRIX_NNZ": 224999, "MATRIX_DENSITY": 9.999955555555555e-06, "TIME_S": 1.4513022899627686} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 224997, 224998, - 224999]), - col_indices=tensor([ 1060, 41338, 58835, ..., 108277, 73571, - 97514]), - values=tensor([-0.6349, 1.6645, 0.3729, ..., 0.0544, 1.2271, - 0.2155]), size=(150000, 150000), nnz=224999, - layout=torch.sparse_csr) -tensor([0.9990, 0.4988, 0.7512, ..., 0.3421, 0.7043, 0.0135]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224999 -Density: 9.999955555555555e-06 -Time: 1.4513022899627686 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 7234 -ss 150000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 225000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.380627632141113} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 225000, 225000, - 225000]), - col_indices=tensor([97170, 89495, 1274, ..., 10485, 28306, 74671]), - values=tensor([-1.0399, 1.1242, -1.2641, ..., 2.0193, 0.6877, - -0.5401]), size=(150000, 150000), nnz=225000, - layout=torch.sparse_csr) -tensor([0.5775, 0.5693, 0.4176, ..., 0.7548, 0.5748, 0.0697]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 225000 -Density: 1e-05 -Time: 10.380627632141113 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 225000, 225000, - 225000]), - col_indices=tensor([97170, 89495, 1274, ..., 10485, 28306, 74671]), - values=tensor([-1.0399, 1.1242, -1.2641, ..., 2.0193, 0.6877, - -0.5401]), size=(150000, 150000), nnz=225000, - layout=torch.sparse_csr) -tensor([0.5775, 0.5693, 0.4176, ..., 0.7548, 0.5748, 0.0697]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 225000 -Density: 1e-05 -Time: 10.380627632141113 seconds - -[20.28, 20.4, 20.4, 20.44, 20.32, 20.76, 20.76, 20.8, 21.04, 20.88] -[20.4, 20.28, 22.2, 22.8, 25.44, 27.92, 27.92, 30.48, 30.52, 31.6, 31.28, 31.64, 31.72, 31.84] -10.832556247711182 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 7234, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 225000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.380627632141113, 'TIME_S_1KI': 1.4349775548992416, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 386.428590927124, 'W': 35.67289032159632} -[20.28, 20.4, 20.4, 20.44, 20.32, 20.76, 20.76, 20.8, 21.04, 20.88, 20.44, 20.36, 20.4, 20.24, 20.4, 20.72, 20.64, 20.6, 20.96, 20.96] -370.52000000000004 -18.526000000000003 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 7234, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 225000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.380627632141113, 'TIME_S_1KI': 1.4349775548992416, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 386.428590927124, 'W': 35.67289032159632, 'J_1KI': 53.41838414806801, 'W_1KI': 4.931281493170627, 'W_D': 17.146890321596317, 'J_D': 185.74465388202657, 'W_D_1KI': 2.3703193698640193, 'J_D_1KI': 0.32766372267957133} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_2e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_150000_2e-05.json deleted file mode 100644 index f6e1be9..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 5267, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449995, "MATRIX_DENSITY": 1.9999777777777777e-05, "TIME_S": 10.143731594085693, "TIME_S_1KI": 1.9259030936179407, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 345.50767498016353, "W": 33.64124287914862, "J_1KI": 65.59857128918996, "W_1KI": 6.387173510375664, "W_D": 15.246242879148621, "J_D": 156.5844029092788, "W_D_1KI": 2.894673035722161, "J_D_1KI": 0.5495866785118969} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_2e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_150000_2e-05.output deleted file mode 100644 index 35f006f..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_2e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 150000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 
449996, "MATRIX_DENSITY": 1.9999822222222222e-05, "TIME_S": 1.9931979179382324} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 9, ..., 449993, 449994, - 449996]), - col_indices=tensor([ 22593, 92994, 310, ..., 102409, 47111, - 69289]), - values=tensor([ 0.6471, 0.9609, 0.5622, ..., 2.1388, -1.1845, - 0.1991]), size=(150000, 150000), nnz=449996, - layout=torch.sparse_csr) -tensor([0.7282, 0.9879, 0.2896, ..., 0.4436, 0.3832, 0.9789]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449996 -Density: 1.9999822222222222e-05 -Time: 1.9931979179382324 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 5267 -ss 150000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449995, "MATRIX_DENSITY": 1.9999777777777777e-05, "TIME_S": 10.143731594085693} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 449988, 449991, - 449995]), - col_indices=tensor([ 72009, 57024, 70057, ..., 83430, 119068, - 138373]), - values=tensor([-0.9929, 0.3427, 2.6993, ..., 1.4662, 0.4304, - 0.3433]), size=(150000, 150000), nnz=449995, - layout=torch.sparse_csr) -tensor([0.8078, 0.3699, 0.1921, ..., 0.5384, 0.5304, 0.7616]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449995 -Density: 1.9999777777777777e-05 -Time: 10.143731594085693 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 449988, 449991, - 449995]), - col_indices=tensor([ 72009, 57024, 70057, ..., 83430, 119068, - 138373]), - values=tensor([-0.9929, 0.3427, 2.6993, ..., 1.4662, 0.4304, - 0.3433]), size=(150000, 150000), nnz=449995, - layout=torch.sparse_csr) -tensor([0.8078, 0.3699, 0.1921, ..., 0.5384, 0.5304, 0.7616]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449995 -Density: 1.9999777777777777e-05 -Time: 10.143731594085693 seconds - -[20.48, 20.48, 20.72, 20.64, 20.64, 20.48, 20.48, 20.32, 20.16, 20.4] -[20.36, 20.52, 23.76, 24.84, 27.28, 27.28, 29.96, 32.12, 31.04, 31.96, 31.24, 31.2, 31.32] -10.270359992980957 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 5267, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 449995, 'MATRIX_DENSITY': 1.9999777777777777e-05, 'TIME_S': 10.143731594085693, 'TIME_S_1KI': 1.9259030936179407, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.50767498016353, 'W': 33.64124287914862} -[20.48, 20.48, 20.72, 20.64, 20.64, 20.48, 20.48, 20.32, 20.16, 20.4, 20.24, 20.2, 20.16, 20.28, 20.24, 20.52, 20.6, 20.64, 20.52, 20.52] -367.9 -18.395 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 5267, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 449995, 'MATRIX_DENSITY': 1.9999777777777777e-05, 'TIME_S': 10.143731594085693, 'TIME_S_1KI': 1.9259030936179407, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.50767498016353, 'W': 33.64124287914862, 'J_1KI': 65.59857128918996, 'W_1KI': 6.387173510375664, 'W_D': 15.246242879148621, 'J_D': 156.5844029092788, 'W_D_1KI': 2.894673035722161, 'J_D_1KI': 0.5495866785118969} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_5e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_150000_5e-05.json deleted file mode 100644 index 40cd5f6..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 2982, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124963, "MATRIX_DENSITY": 4.9998355555555557e-05, "TIME_S": 10.834063529968262, "TIME_S_1KI": 3.633153430572858, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 388.3960797214508, "W": 36.8830912456038, "J_1KI": 130.24684095286747, "W_1KI": 12.368575199733, "W_D": 18.6480912456038, "J_D": 196.3730611908436, "W_D_1KI": 6.253551725554594, "J_D_1KI": 2.0970998408969126} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_5e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_150000_5e-05.output deleted file mode 100644 index 6242bc7..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_5e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 150000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 
22500000000, "MATRIX_NNZ": 1124975, "MATRIX_DENSITY": 4.999888888888889e-05, "TIME_S": 3.5205483436584473} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 18, ..., 1124957, - 1124964, 1124975]), - col_indices=tensor([ 45673, 46869, 68642, ..., 93007, 132415, - 145624]), - values=tensor([ 1.0589, -0.8292, -0.9400, ..., -0.5244, 0.2483, - -1.2673]), size=(150000, 150000), nnz=1124975, - layout=torch.sparse_csr) -tensor([0.2890, 0.6092, 0.8181, ..., 0.3578, 0.3655, 0.2203]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124975 -Density: 4.999888888888889e-05 -Time: 3.5205483436584473 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2982 -ss 150000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124963, "MATRIX_DENSITY": 4.9998355555555557e-05, "TIME_S": 10.834063529968262} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 16, ..., 1124949, - 1124959, 1124963]), - col_indices=tensor([ 30949, 52207, 66032, ..., 93409, 116462, - 142125]), - values=tensor([-1.0709, 0.6351, 0.9891, ..., -0.8011, -0.8370, - -1.9774]), size=(150000, 150000), nnz=1124963, - layout=torch.sparse_csr) -tensor([0.8443, 0.8073, 0.2696, ..., 0.1079, 0.2448, 0.9883]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124963 -Density: 4.9998355555555557e-05 -Time: 10.834063529968262 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 16, ..., 1124949, - 1124959, 1124963]), - col_indices=tensor([ 30949, 52207, 66032, ..., 93409, 116462, - 142125]), - values=tensor([-1.0709, 0.6351, 0.9891, ..., -0.8011, -0.8370, - -1.9774]), size=(150000, 150000), nnz=1124963, - layout=torch.sparse_csr) -tensor([0.8443, 0.8073, 0.2696, ..., 0.1079, 0.2448, 0.9883]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124963 -Density: 4.9998355555555557e-05 -Time: 10.834063529968262 seconds - -[20.44, 20.4, 20.2, 20.2, 20.28, 20.4, 20.28, 20.24, 20.24, 20.16] -[20.12, 20.08, 20.52, 25.28, 26.48, 29.56, 32.08, 31.44, 32.04, 32.2, 32.04, 31.84, 31.76, 32.04] -10.53046441078186 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 2982, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1124963, 'MATRIX_DENSITY': 4.9998355555555557e-05, 'TIME_S': 10.834063529968262, 'TIME_S_1KI': 3.633153430572858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 388.3960797214508, 'W': 36.8830912456038} -[20.44, 20.4, 20.2, 20.2, 20.28, 20.4, 20.28, 20.24, 20.24, 20.16, 20.24, 20.24, 20.16, 20.16, 20.12, 20.12, 20.12, 20.32, 20.52, 20.56] -364.7 -18.235 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 2982, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1124963, 'MATRIX_DENSITY': 4.9998355555555557e-05, 'TIME_S': 10.834063529968262, 'TIME_S_1KI': 3.633153430572858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 388.3960797214508, 'W': 36.8830912456038, 'J_1KI': 130.24684095286747, 'W_1KI': 12.368575199733, 'W_D': 18.6480912456038, 'J_D': 196.3730611908436, 'W_D_1KI': 6.253551725554594, 'J_D_1KI': 2.0970998408969126} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_8e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_150000_8e-05.json deleted file mode 100644 index 56c91e0..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 2120, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799917, "MATRIX_DENSITY": 7.999631111111111e-05, "TIME_S": 10.117506265640259, "TIME_S_1KI": 4.772408615868047, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 423.4199336338043, "W": 36.590743995468394, "J_1KI": 199.72638378953033, "W_1KI": 17.259784903522828, "W_D": 18.381743995468394, "J_D": 212.70944432282448, "W_D_1KI": 8.670633960126601, "J_D_1KI": 4.089921679305} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_150000_8e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_150000_8e-05.output deleted file mode 100644 index 8df716d..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_150000_8e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 150000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 
22500000000, "MATRIX_NNZ": 1799926, "MATRIX_DENSITY": 7.999671111111111e-05, "TIME_S": 4.95142388343811} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 15, ..., 1799908, - 1799919, 1799926]), - col_indices=tensor([ 1214, 9526, 13372, ..., 119996, 126785, - 136891]), - values=tensor([-0.1068, 0.2457, -0.1318, ..., 1.2429, -0.8245, - -0.4292]), size=(150000, 150000), nnz=1799926, - layout=torch.sparse_csr) -tensor([0.8310, 0.0944, 0.9117, ..., 0.1166, 0.0113, 0.2839]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799926 -Density: 7.999671111111111e-05 -Time: 4.95142388343811 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2120 -ss 150000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799917, "MATRIX_DENSITY": 7.999631111111111e-05, "TIME_S": 10.117506265640259} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 18, ..., 1799892, - 1799905, 1799917]), - col_indices=tensor([ 1343, 1624, 10718, ..., 128180, 139489, - 145861]), - values=tensor([ 1.0168, -0.7101, 0.5768, ..., -0.0198, -0.1886, - -0.2993]), size=(150000, 150000), nnz=1799917, - layout=torch.sparse_csr) -tensor([0.6697, 0.3813, 0.8738, ..., 0.3394, 0.0147, 0.2298]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799917 -Density: 7.999631111111111e-05 -Time: 10.117506265640259 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 18, ..., 1799892, - 1799905, 1799917]), - col_indices=tensor([ 1343, 1624, 10718, ..., 128180, 139489, - 145861]), - values=tensor([ 1.0168, -0.7101, 0.5768, ..., -0.0198, -0.1886, - -0.2993]), size=(150000, 150000), nnz=1799917, - layout=torch.sparse_csr) -tensor([0.6697, 0.3813, 0.8738, ..., 0.3394, 0.0147, 0.2298]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799917 -Density: 7.999631111111111e-05 -Time: 10.117506265640259 seconds - -[20.28, 20.24, 20.28, 20.28, 20.24, 20.08, 20.04, 19.92, 20.12, 20.08] -[20.32, 20.6, 23.68, 23.68, 25.6, 28.08, 30.28, 33.04, 31.4, 31.84, 32.6, 32.6, 32.4, 32.52, 32.6] -11.571777105331421 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 2120, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1799917, 'MATRIX_DENSITY': 7.999631111111111e-05, 'TIME_S': 10.117506265640259, 'TIME_S_1KI': 4.772408615868047, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 423.4199336338043, 'W': 36.590743995468394} -[20.28, 20.24, 20.28, 20.28, 20.24, 20.08, 20.04, 19.92, 20.12, 20.08, 20.36, 20.24, 20.2, 20.2, 20.52, 20.36, 20.24, 20.24, 20.4, 20.44] -364.18 -18.209 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 2120, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1799917, 'MATRIX_DENSITY': 7.999631111111111e-05, 'TIME_S': 10.117506265640259, 'TIME_S_1KI': 4.772408615868047, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 423.4199336338043, 'W': 36.590743995468394, 'J_1KI': 199.72638378953033, 'W_1KI': 17.259784903522828, 'W_D': 18.381743995468394, 'J_D': 212.70944432282448, 'W_D_1KI': 8.670633960126601, 'J_D_1KI': 4.089921679305} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_0.0001.json b/pytorch/output_1core_after_test/altra_10_10_10_200000_0.0001.json deleted file mode 100644 index f6b60d5..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999801, "MATRIX_DENSITY": 9.9995025e-05, "TIME_S": 13.38718843460083, "TIME_S_1KI": 13.38718843460083, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 463.430492515564, "W": 35.3250089736942, "J_1KI": 463.430492515564, "W_1KI": 35.3250089736942, "W_D": 16.8970089736942, "J_D": 221.67267378616333, "W_D_1KI": 16.8970089736942, "J_D_1KI": 16.8970089736942} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_0.0001.output b/pytorch/output_1core_after_test/altra_10_10_10_200000_0.0001.output deleted file mode 100644 index 14a8d26..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_0.0001.output +++ /dev/null @@ -1,49 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 200000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, 
"MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999801, "MATRIX_DENSITY": 9.9995025e-05, "TIME_S": 13.38718843460083} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 49, ..., 3999760, - 3999779, 3999801]), - col_indices=tensor([ 725, 16500, 17380, ..., 191062, 191507, - 194960]), - values=tensor([ 1.0478, 2.6646, -0.7212, ..., -0.5863, -0.3201, - 2.0476]), size=(200000, 200000), nnz=3999801, - layout=torch.sparse_csr) -tensor([0.6554, 0.2512, 0.1221, ..., 0.3923, 0.3935, 0.0593]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999801 -Density: 9.9995025e-05 -Time: 13.38718843460083 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 49, ..., 3999760, - 3999779, 3999801]), - col_indices=tensor([ 725, 16500, 17380, ..., 191062, 191507, - 194960]), - values=tensor([ 1.0478, 2.6646, -0.7212, ..., -0.5863, -0.3201, - 2.0476]), size=(200000, 200000), nnz=3999801, - layout=torch.sparse_csr) -tensor([0.6554, 0.2512, 0.1221, ..., 0.3923, 0.3935, 0.0593]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999801 -Density: 9.9995025e-05 -Time: 13.38718843460083 seconds - -[20.04, 20.24, 20.72, 20.6, 20.56, 20.76, 20.76, 20.6, 20.48, 20.36] -[20.4, 20.4, 20.64, 22.08, 24.48, 25.76, 28.6, 30.32, 31.84, 31.76, 32.68, 32.84, 32.8, 32.96, 32.96, 32.88, 32.68] -13.119048118591309 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3999801, 'MATRIX_DENSITY': 9.9995025e-05, 'TIME_S': 13.38718843460083, 'TIME_S_1KI': 13.38718843460083, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.430492515564, 'W': 35.3250089736942} -[20.04, 20.24, 20.72, 20.6, 20.56, 20.76, 20.76, 20.6, 20.48, 20.36, 20.2, 20.08, 20.04, 20.12, 20.12, 20.56, 20.64, 20.72, 20.84, 20.84] -368.56000000000006 -18.428000000000004 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3999801, 'MATRIX_DENSITY': 9.9995025e-05, 'TIME_S': 13.38718843460083, 'TIME_S_1KI': 13.38718843460083, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.430492515564, 'W': 35.3250089736942, 'J_1KI': 463.430492515564, 'W_1KI': 35.3250089736942, 'W_D': 16.8970089736942, 'J_D': 221.67267378616333, 'W_D_1KI': 16.8970089736942, 'J_D_1KI': 16.8970089736942} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_1e-05.json 
b/pytorch/output_1core_after_test/altra_10_10_10_200000_1e-05.json deleted file mode 100644 index 8332037..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4517, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399999, "MATRIX_DENSITY": 9.999975e-06, "TIME_S": 10.48474931716919, "TIME_S_1KI": 2.321175407830239, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 350.1860060596466, "W": 33.203515607185345, "J_1KI": 77.52623556777654, "W_1KI": 7.350789375068706, "W_D": 14.75751560718534, "J_D": 155.6424178385734, "W_D_1KI": 3.267105514099035, "J_D_1KI": 0.7232910148547785} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_1e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_200000_1e-05.output deleted file mode 100644 index a7c2b56..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_1e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 200000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399997, "MATRIX_DENSITY": 9.999925e-06, "TIME_S": 2.32439923286438} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 399995, 399996, - 399997]), - col_indices=tensor([ 53721, 100176, 137115, ..., 76474, 111928, - 67722]), - values=tensor([ 0.8787, 0.6153, 0.7457, ..., 1.5157, 1.9555, - -1.5636]), size=(200000, 200000), nnz=399997, - layout=torch.sparse_csr) -tensor([0.4176, 0.1900, 0.9801, ..., 0.0553, 0.1816, 0.9381]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399997 -Density: 9.999925e-06 -Time: 2.32439923286438 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4517 -ss 200000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399999, "MATRIX_DENSITY": 9.999975e-06, "TIME_S": 10.48474931716919} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 399994, 399998, - 399999]), - col_indices=tensor([ 23160, 67764, 94980, ..., 158664, 163872, - 193419]), - values=tensor([ 0.4031, -0.2737, -0.3940, ..., -0.8147, 0.5871, - 0.1087]), size=(200000, 200000), nnz=399999, - layout=torch.sparse_csr) -tensor([0.9178, 0.7444, 0.9877, ..., 0.0829, 0.8958, 0.1485]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399999 -Density: 9.999975e-06 -Time: 10.48474931716919 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 399994, 399998, - 399999]), - col_indices=tensor([ 23160, 67764, 94980, ..., 158664, 163872, - 193419]), - values=tensor([ 0.4031, -0.2737, -0.3940, ..., -0.8147, 0.5871, - 0.1087]), size=(200000, 200000), nnz=399999, - layout=torch.sparse_csr) -tensor([0.9178, 0.7444, 0.9877, ..., 0.0829, 0.8958, 0.1485]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399999 -Density: 9.999975e-06 -Time: 10.48474931716919 seconds - -[20.44, 20.44, 20.44, 20.28, 20.32, 20.44, 20.28, 20.44, 20.6, 20.32] -[20.24, 20.2, 21.16, 23.04, 25.4, 27.88, 30.44, 30.44, 31.44, 32.28, 31.92, 32.08, 32.28] -10.546654462814331 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4517, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 399999, 'MATRIX_DENSITY': 9.999975e-06, 'TIME_S': 10.48474931716919, 'TIME_S_1KI': 2.321175407830239, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 350.1860060596466, 'W': 33.203515607185345} -[20.44, 20.44, 20.44, 20.28, 20.32, 20.44, 20.28, 20.44, 20.6, 20.32, 20.36, 20.4, 20.52, 20.8, 20.8, 20.84, 20.72, 20.56, 20.32, 20.32] -368.9200000000001 -18.446000000000005 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4517, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 399999, 'MATRIX_DENSITY': 9.999975e-06, 'TIME_S': 10.48474931716919, 'TIME_S_1KI': 2.321175407830239, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 350.1860060596466, 'W': 33.203515607185345, 'J_1KI': 77.52623556777654, 'W_1KI': 7.350789375068706, 'W_D': 14.75751560718534, 'J_D': 155.6424178385734, 'W_D_1KI': 3.267105514099035, 'J_D_1KI': 0.7232910148547785} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_2e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_200000_2e-05.json deleted file mode 100644 index a7e00af..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3140, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799993, "MATRIX_DENSITY": 1.9999825e-05, "TIME_S": 10.441259145736694, "TIME_S_1KI": 3.325241766158183, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 358.6360774898529, "W": 34.0829065787975, "J_1KI": 114.21531130250091, "W_1KI": 10.854428846750796, "W_D": 15.735906578797497, "J_D": 165.58047354674338, "W_D_1KI": 5.0114352161775475, "J_D_1KI": 1.5959984764896646} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_2e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_200000_2e-05.output deleted file mode 100644 index 847b45a..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_2e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 200000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799992, "MATRIX_DENSITY": 1.99998e-05, "TIME_S": 3.34374737739563} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 6, ..., 799986, 799987, - 799992]), - col_indices=tensor([ 86419, 94662, 114023, ..., 79708, 99766, - 133740]), - values=tensor([ 0.4049, -1.9715, 0.0309, ..., 0.2150, 1.8425, - -0.1233]), size=(200000, 200000), nnz=799992, - layout=torch.sparse_csr) -tensor([0.9939, 0.9385, 0.2115, ..., 0.3578, 0.8825, 0.1238]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799992 -Density: 1.99998e-05 -Time: 3.34374737739563 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3140 -ss 200000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799993, "MATRIX_DENSITY": 1.9999825e-05, "TIME_S": 10.441259145736694} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 799988, 799989, - 799993]), - col_indices=tensor([ 56748, 164626, 184621, ..., 38108, 111530, - 145433]), - values=tensor([-0.3915, -2.3778, -0.8361, ..., 0.8031, 1.0234, - -0.4414]), size=(200000, 200000), nnz=799993, - layout=torch.sparse_csr) -tensor([0.2982, 0.7175, 0.9318, ..., 0.5875, 0.3003, 0.7472]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799993 -Density: 1.9999825e-05 -Time: 10.441259145736694 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 799988, 799989, - 799993]), - col_indices=tensor([ 56748, 164626, 184621, ..., 38108, 111530, - 145433]), - values=tensor([-0.3915, -2.3778, -0.8361, ..., 0.8031, 1.0234, - -0.4414]), size=(200000, 200000), nnz=799993, - layout=torch.sparse_csr) -tensor([0.2982, 0.7175, 0.9318, ..., 0.5875, 0.3003, 0.7472]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799993 -Density: 1.9999825e-05 -Time: 10.441259145736694 seconds - -[20.04, 20.24, 20.24, 20.2, 20.2, 20.44, 20.36, 20.48, 20.8, 20.8] -[20.72, 20.68, 23.56, 24.64, 26.8, 29.72, 32.0, 31.16, 31.16, 32.16, 31.92, 31.88, 31.88] -10.522461652755737 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3140, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 799993, 'MATRIX_DENSITY': 1.9999825e-05, 'TIME_S': 10.441259145736694, 'TIME_S_1KI': 3.325241766158183, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 358.6360774898529, 'W': 34.0829065787975} -[20.04, 20.24, 20.24, 20.2, 20.2, 20.44, 20.36, 20.48, 20.8, 20.8, 20.32, 20.2, 20.28, 20.32, 20.4, 20.4, 20.48, 20.36, 20.6, 20.72] -366.94 -18.347 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3140, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 799993, 'MATRIX_DENSITY': 1.9999825e-05, 'TIME_S': 10.441259145736694, 'TIME_S_1KI': 3.325241766158183, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 358.6360774898529, 'W': 34.0829065787975, 'J_1KI': 114.21531130250091, 'W_1KI': 10.854428846750796, 'W_D': 15.735906578797497, 'J_D': 165.58047354674338, 'W_D_1KI': 5.0114352161775475, 'J_D_1KI': 1.5959984764896646} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_5e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_200000_5e-05.json deleted file mode 100644 index fc45df4..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1622, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999942, "MATRIX_DENSITY": 4.999855e-05, "TIME_S": 10.43860411643982, "TIME_S_1KI": 6.435637556374735, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 392.18215005874634, "W": 37.165866811389904, "J_1KI": 241.78924171316052, "W_1KI": 22.913604692595502, "W_D": 18.725866811389906, "J_D": 197.59933879852295, "W_D_1KI": 11.54492405141178, "J_D_1KI": 7.1177090329295805} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_5e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_200000_5e-05.output deleted file mode 100644 index da00bd3..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_5e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 
--membind=0 python3 spmv.py synthetic csr 1000 -ss 200000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999950, "MATRIX_DENSITY": 4.999875e-05, "TIME_S": 6.472595930099487} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 24, ..., 1999927, - 1999941, 1999950]), - col_indices=tensor([ 15347, 15852, 27357, ..., 175265, 186435, - 196056]), - values=tensor([-0.1963, -0.5915, -0.4592, ..., -2.0562, -0.0108, - -1.5764]), size=(200000, 200000), nnz=1999950, - layout=torch.sparse_csr) -tensor([0.6431, 0.3328, 0.3894, ..., 0.6282, 0.8103, 0.7215]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999950 -Density: 4.999875e-05 -Time: 6.472595930099487 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1622 -ss 200000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999942, "MATRIX_DENSITY": 4.999855e-05, "TIME_S": 10.43860411643982} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 27, ..., 1999920, - 1999931, 1999942]), - col_indices=tensor([ 7401, 8093, 12306, ..., 126794, 152727, - 181903]), - values=tensor([-1.8596, 1.4317, -0.3064, ..., -0.2326, 0.0517, - -1.4306]), size=(200000, 200000), nnz=1999942, - layout=torch.sparse_csr) -tensor([0.3318, 0.9133, 0.7802, ..., 0.9835, 0.0623, 0.0964]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999942 -Density: 4.999855e-05 -Time: 10.43860411643982 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 27, ..., 1999920, - 1999931, 1999942]), - col_indices=tensor([ 7401, 8093, 12306, ..., 126794, 152727, - 181903]), - values=tensor([-1.8596, 1.4317, -0.3064, ..., -0.2326, 0.0517, - -1.4306]), size=(200000, 200000), nnz=1999942, - layout=torch.sparse_csr) -tensor([0.3318, 0.9133, 0.7802, ..., 0.9835, 0.0623, 0.0964]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999942 -Density: 4.999855e-05 -Time: 10.43860411643982 seconds - -[20.28, 20.4, 20.4, 20.32, 20.56, 20.44, 20.64, 20.68, 20.64, 20.48] -[20.56, 20.28, 23.4, 24.56, 26.36, 29.12, 31.56, 31.56, 30.96, 32.48, 32.44, 32.36, 32.52, 32.6] -10.552213191986084 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1622, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 1999942, 'MATRIX_DENSITY': 4.999855e-05, 'TIME_S': 10.43860411643982, 'TIME_S_1KI': 6.435637556374735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 392.18215005874634, 'W': 37.165866811389904} -[20.28, 20.4, 20.4, 20.32, 20.56, 20.44, 20.64, 20.68, 20.64, 20.48, 20.52, 20.32, 20.28, 20.08, 20.32, 20.48, 20.52, 20.68, 20.96, 20.88] -368.79999999999995 -18.439999999999998 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1622, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 1999942, 'MATRIX_DENSITY': 4.999855e-05, 'TIME_S': 10.43860411643982, 'TIME_S_1KI': 6.435637556374735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 392.18215005874634, 'W': 37.165866811389904, 'J_1KI': 241.78924171316052, 'W_1KI': 22.913604692595502, 'W_D': 18.725866811389906, 'J_D': 197.59933879852295, 'W_D_1KI': 11.54492405141178, 'J_D_1KI': 7.1177090329295805} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_8e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_200000_8e-05.json deleted file mode 100644 index 531557d..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199886, "MATRIX_DENSITY": 7.999715e-05, "TIME_S": 11.041945934295654, "TIME_S_1KI": 11.041945934295654, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 420.73246814727787, "W": 37.627043073018406, "J_1KI": 420.73246814727787, "W_1KI": 37.627043073018406, "W_D": 17.962043073018407, "J_D": 200.8452989625931, "W_D_1KI": 17.962043073018407, "J_D_1KI": 17.962043073018407} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_200000_8e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_200000_8e-05.output deleted file mode 100644 index 725266f..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_200000_8e-05.output +++ /dev/null @@ -1,49 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 200000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, 
"MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199886, "MATRIX_DENSITY": 7.999715e-05, "TIME_S": 11.041945934295654} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 29, ..., 3199850, - 3199873, 3199886]), - col_indices=tensor([ 1474, 32917, 59625, ..., 164776, 165534, - 165742]), - values=tensor([ 0.5733, -1.4295, 0.3956, ..., 0.0085, 0.5997, - 0.3568]), size=(200000, 200000), nnz=3199886, - layout=torch.sparse_csr) -tensor([0.1783, 0.3519, 0.3509, ..., 0.9970, 0.4624, 0.8799]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199886 -Density: 7.999715e-05 -Time: 11.041945934295654 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 29, ..., 3199850, - 3199873, 3199886]), - col_indices=tensor([ 1474, 32917, 59625, ..., 164776, 165534, - 165742]), - values=tensor([ 0.5733, -1.4295, 0.3956, ..., 0.0085, 0.5997, - 0.3568]), size=(200000, 200000), nnz=3199886, - layout=torch.sparse_csr) -tensor([0.1783, 0.3519, 0.3509, ..., 0.9970, 0.4624, 0.8799]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199886 -Density: 7.999715e-05 -Time: 11.041945934295654 seconds - -[21.72, 22.52, 23.56, 24.04, 24.04, 23.64, 23.04, 23.16, 23.04, 23.04] -[23.24, 23.4, 23.0, 24.04, 25.0, 26.92, 29.2, 31.12, 32.12, 33.16, 34.04, 34.56, 34.68, 34.56, 34.2] -11.18165111541748 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3199886, 'MATRIX_DENSITY': 7.999715e-05, 'TIME_S': 11.041945934295654, 'TIME_S_1KI': 11.041945934295654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 420.73246814727787, 'W': 37.627043073018406} -[21.72, 22.52, 23.56, 24.04, 24.04, 23.64, 23.04, 23.16, 23.04, 23.04, 20.24, 20.32, 20.48, 20.44, 20.44, 20.44, 20.48, 20.52, 20.4, 20.48] -393.29999999999995 -19.665 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3199886, 'MATRIX_DENSITY': 7.999715e-05, 'TIME_S': 11.041945934295654, 'TIME_S_1KI': 11.041945934295654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 420.73246814727787, 'W': 37.627043073018406, 'J_1KI': 420.73246814727787, 'W_1KI': 37.627043073018406, 'W_D': 17.962043073018407, 'J_D': 200.8452989625931, 'W_D_1KI': 17.962043073018407, 'J_D_1KI': 17.962043073018407} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_0.0001.json 
b/pytorch/output_1core_after_test/altra_10_10_10_20000_0.0001.json deleted file mode 100644 index e1375e1..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 58715, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39997, "MATRIX_DENSITY": 9.99925e-05, "TIME_S": 10.23157262802124, "TIME_S_1KI": 0.17425824113124824, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 312.74463246345516, "W": 30.541103907235787, "J_1KI": 5.326486118767865, "W_1KI": 0.5201584587794564, "W_D": 12.259103907235787, "J_D": 125.53472059965131, "W_D_1KI": 0.2087899839433839, "J_D_1KI": 0.003555990529564573} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_0.0001.output b/pytorch/output_1core_after_test/altra_10_10_10_20000_0.0001.output deleted file mode 100644 index f1449cf..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_0.0001.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 20000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39999, "MATRIX_DENSITY": 9.99975e-05, "TIME_S": 0.17882966995239258} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 39995, 39997, 39999]), - col_indices=tensor([ 1398, 8266, 9733, ..., 5901, 6485, 19808]), - values=tensor([ 1.4442, 0.3161, 0.6925, ..., -1.6441, -1.7494, - -0.0189]), size=(20000, 20000), nnz=39999, - layout=torch.sparse_csr) -tensor([0.3364, 0.8734, 0.2560, ..., 0.4245, 0.7879, 0.4227]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39999 -Density: 9.99975e-05 -Time: 0.17882966995239258 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 58715 -ss 20000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39997, "MATRIX_DENSITY": 9.99925e-05, "TIME_S": 10.23157262802124} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 39994, 39995, 39997]), - col_indices=tensor([ 385, 13575, 993, ..., 7905, 3269, 19471]), - values=tensor([-0.9779, 0.5850, -0.0057, ..., 1.2177, 0.6236, - -0.3848]), size=(20000, 20000), nnz=39997, - layout=torch.sparse_csr) -tensor([0.4733, 0.3692, 0.8512, ..., 0.6396, 0.1954, 0.4828]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39997 -Density: 9.99925e-05 -Time: 10.23157262802124 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 39994, 39995, 39997]), - col_indices=tensor([ 385, 13575, 993, ..., 7905, 3269, 19471]), - values=tensor([-0.9779, 0.5850, -0.0057, ..., 1.2177, 0.6236, - -0.3848]), size=(20000, 20000), nnz=39997, - layout=torch.sparse_csr) -tensor([0.4733, 0.3692, 0.8512, ..., 0.6396, 0.1954, 0.4828]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39997 -Density: 9.99925e-05 -Time: 10.23157262802124 seconds - -[19.96, 19.8, 19.8, 19.88, 20.12, 20.6, 20.88, 20.88, 21.08, 21.08] -[20.88, 20.84, 20.88, 24.6, 26.68, 28.84, 29.92, 30.28, 26.48, 25.4, 25.04, 24.96, 24.84] -10.240122079849243 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 58715, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 39997, 'MATRIX_DENSITY': 9.99925e-05, 'TIME_S': 10.23157262802124, 'TIME_S_1KI': 0.17425824113124824, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.74463246345516, 'W': 30.541103907235787} -[19.96, 19.8, 19.8, 19.88, 20.12, 20.6, 20.88, 20.88, 21.08, 21.08, 20.16, 20.24, 20.16, 20.12, 20.16, 20.16, 20.24, 20.32, 20.4, 20.4] -365.64 -18.282 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 58715, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 39997, 'MATRIX_DENSITY': 9.99925e-05, 'TIME_S': 10.23157262802124, 'TIME_S_1KI': 0.17425824113124824, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.74463246345516, 'W': 30.541103907235787, 'J_1KI': 5.326486118767865, 'W_1KI': 0.5201584587794564, 'W_D': 12.259103907235787, 'J_D': 125.53472059965131, 'W_D_1KI': 0.2087899839433839, 'J_D_1KI': 0.003555990529564573} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_1e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_20000_1e-05.json deleted file mode 100644 index 0229441..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 175318, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.609092712402344, "TIME_S_1KI": 0.06051342538930597, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 296.8691830253601, "W": 
28.18179818289856, "J_1KI": 1.6933183302647765, "W_1KI": 0.1607467469563796, "W_D": 9.98079818289856, "J_D": 105.1384792151451, "W_D_1KI": 0.05692968310668933, "J_D_1KI": 0.00032472240789131366} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_1e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_20000_1e-05.output deleted file mode 100644 index adf421b..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_1e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 20000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.06297636032104492} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 4000, 4000, 4000]), - col_indices=tensor([12357, 15223, 9231, ..., 10258, 1732, 65]), - values=tensor([ 0.8374, 0.1792, -0.7911, ..., -0.9962, 0.9719, - 1.4813]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.0473, 0.1860, 0.9022, ..., 0.7616, 0.8259, 0.0597]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 0.06297636032104492 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 166729 -ss 20000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.985570907592773} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([14783, 10920, 11726, ..., 6504, 7631, 16250]), - values=tensor([-0.3836, -0.4175, -1.2173, ..., 1.7428, 0.1050, - 0.7111]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.9840, 0.6859, 0.7359, ..., 0.2102, 0.3574, 0.8617]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 9.985570907592773 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 175318 -ss 20000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.609092712402344} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 3999, 3999, 4000]), - col_indices=tensor([11238, 15714, 3500, ..., 13500, 6351, 10546]), - values=tensor([-1.2322, -0.5076, 1.3304, ..., -0.0344, 1.2521, - 0.5111]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.2801, 0.3002, 0.3051, ..., 0.7279, 0.9688, 0.7873]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.609092712402344 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 3999, 3999, 4000]), - col_indices=tensor([11238, 15714, 3500, ..., 13500, 6351, 10546]), - values=tensor([-1.2322, -0.5076, 1.3304, ..., -0.0344, 1.2521, - 0.5111]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.2801, 0.3002, 0.3051, ..., 0.7279, 0.9688, 0.7873]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.609092712402344 seconds - -[20.48, 20.4, 20.48, 20.44, 20.44, 20.32, 20.24, 20.32, 20.6, 20.52] -[20.56, 20.4, 20.64, 24.4, 25.8, 26.48, 27.36, 24.32, 23.96, 23.12, 23.08, 23.08, 23.16] -10.534075260162354 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 175318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 4000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.609092712402344, 'TIME_S_1KI': 0.06051342538930597, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.8691830253601, 'W': 28.18179818289856} -[20.48, 20.4, 20.48, 20.44, 20.44, 20.32, 20.24, 20.32, 20.6, 20.52, 20.28, 20.2, 20.16, 19.92, 19.96, 19.8, 19.96, 19.8, 20.16, 20.36] -364.02 -18.201 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 175318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 4000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.609092712402344, 'TIME_S_1KI': 0.06051342538930597, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.8691830253601, 'W': 28.18179818289856, 'J_1KI': 1.6933183302647765, 'W_1KI': 0.1607467469563796, 'W_D': 9.98079818289856, 'J_D': 105.1384792151451, 'W_D_1KI': 0.05692968310668933, 'J_D_1KI': 0.00032472240789131366} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_2e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_20000_2e-05.json deleted file mode 100644 index b4fdd9e..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 118942, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.07962417602539, "TIME_S_1KI": 0.08474402798023735, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 297.63386821746826, "W": 28.954115811135768, "J_1KI": 2.5023445731320164, "W_1KI": 0.24343054439252548, "W_D": 10.466115811135769, "J_D": 107.58645003700256, "W_D_1KI": 0.08799344059403548, "J_D_1KI": 0.0007398012526612591} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_2e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_20000_2e-05.output deleted file mode 100644 index 13b47b3..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_2e-05.output +++ /dev/null @@ -1,66 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 20000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 0.08827829360961914} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 8000, 8000, 8000]), - col_indices=tensor([11437, 4018, 19190, ..., 10689, 12356, 1797]), - values=tensor([ 0.9012, 2.0083, 1.2437, ..., -1.5308, 0.0468, - -1.8336]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([7.3997e-01, 9.3806e-01, 1.1245e-01, ..., 3.6502e-04, 1.7307e-01, - 5.9848e-01]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 0.08827829360961914 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 118942 -ss 20000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.07962417602539} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8000, 8000, 8000]), - col_indices=tensor([ 5851, 16585, 17651, ..., 8618, 3900, 5823]), - values=tensor([ 0.3386, 1.2811, 0.0393, ..., -0.3587, 0.6718, - 0.5199]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.4256, 0.9835, 0.0324, ..., 0.1762, 0.1511, 0.0829]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 10.07962417602539 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8000, 8000, 8000]), - col_indices=tensor([ 5851, 16585, 17651, ..., 8618, 3900, 5823]), - values=tensor([ 0.3386, 1.2811, 0.0393, ..., -0.3587, 0.6718, - 0.5199]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.4256, 0.9835, 0.0324, ..., 0.1762, 0.1511, 0.0829]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 10.07962417602539 seconds - -[19.88, 20.04, 20.08, 20.4, 20.56, 20.6, 20.6, 20.64, 20.64, 20.4] -[20.68, 20.76, 20.84, 24.64, 26.64, 27.2, 27.92, 25.6, 24.76, 23.44, 23.72, 23.44, 23.52] -10.27950119972229 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 118942, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.07962417602539, 'TIME_S_1KI': 0.08474402798023735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 297.63386821746826, 'W': 28.954115811135768} -[19.88, 20.04, 20.08, 20.4, 20.56, 20.6, 20.6, 20.64, 20.64, 20.4, 20.28, 20.52, 20.88, 20.84, 20.84, 20.68, 20.44, 20.68, 20.68, 20.72] -369.76 -18.488 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 118942, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.07962417602539, 'TIME_S_1KI': 0.08474402798023735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 297.63386821746826, 'W': 28.954115811135768, 'J_1KI': 2.5023445731320164, 'W_1KI': 0.24343054439252548, 'W_D': 10.466115811135769, 'J_D': 107.58645003700256, 'W_D_1KI': 0.08799344059403548, 'J_D_1KI': 0.0007398012526612591} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_5e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_20000_5e-05.json deleted file mode 100644 index a0c8000..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 76193, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.029513835906982, "TIME_S_1KI": 0.1316330087528642, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 282.77299644470213, "W": 27.674543734558664, "J_1KI": 3.711272642430435, "W_1KI": 0.3632163549743239, "W_D": 9.323543734558665, "J_D": 95.26611981725691, "W_D_1KI": 0.12236745809403313, "J_D_1KI": 0.0016060196880820171} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_5e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_20000_5e-05.output deleted file mode 100644 index 2991719..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_5e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 20000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 19998, "MATRIX_DENSITY": 4.9995e-05, "TIME_S": 0.13780617713928223} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 19996, 19997, 19998]), - col_indices=tensor([ 8564, 15371, 12390, ..., 9506, 1971, 15999]), - values=tensor([ 0.3043, -0.2329, -1.5565, ..., -0.7374, 0.0559, - 1.5079]), size=(20000, 20000), nnz=19998, - layout=torch.sparse_csr) -tensor([0.5830, 0.7136, 0.4497, ..., 0.0929, 0.6777, 0.4633]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 19998 -Density: 4.9995e-05 -Time: 0.13780617713928223 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 76193 -ss 20000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.029513835906982} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 19996, 19998, 20000]), - col_indices=tensor([ 5096, 14038, 11024, ..., 11411, 2364, 2810]), - values=tensor([-1.0860, -0.1176, -0.2257, ..., 1.5728, -0.1349, - 0.0418]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.2807, 0.2512, 0.1212, ..., 0.2572, 0.0420, 0.0768]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 10.029513835906982 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 19996, 19998, 20000]), - col_indices=tensor([ 5096, 14038, 11024, ..., 11411, 2364, 2810]), - values=tensor([-1.0860, -0.1176, -0.2257, ..., 1.5728, -0.1349, - 0.0418]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.2807, 0.2512, 0.1212, ..., 0.2572, 0.0420, 0.0768]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 10.029513835906982 seconds - -[20.72, 20.72, 20.64, 20.36, 20.4, 20.4, 20.24, 20.36, 20.48, 20.6] -[20.4, 20.32, 21.36, 22.4, 22.4, 23.88, 24.8, 25.24, 24.56, 24.32, 23.48, 23.16, 23.2] -10.217801570892334 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 76193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 20000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.029513835906982, 'TIME_S_1KI': 0.1316330087528642, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 282.77299644470213, 'W': 27.674543734558664} -[20.72, 20.72, 20.64, 20.36, 20.4, 20.4, 20.24, 20.36, 20.48, 20.6, 20.28, 20.16, 20.16, 20.08, 20.2, 20.32, 20.4, 20.52, 20.52, 20.52] -367.02 -18.351 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 76193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 20000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.029513835906982, 'TIME_S_1KI': 0.1316330087528642, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 282.77299644470213, 'W': 27.674543734558664, 'J_1KI': 3.711272642430435, 'W_1KI': 0.3632163549743239, 'W_D': 9.323543734558665, 'J_D': 95.26611981725691, 'W_D_1KI': 0.12236745809403313, 'J_D_1KI': 0.0016060196880820171} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_8e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_20000_8e-05.json deleted file mode 100644 index aba01eb..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 63424, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31997, "MATRIX_DENSITY": 7.99925e-05, "TIME_S": 10.19437575340271, "TIME_S_1KI": 0.1607337246689378, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 285.14172214508056, "W": 27.849590636275384, "J_1KI": 4.495801623125009, "W_1KI": 0.43910176961836817, "W_D": 9.509590636275387, "J_D": 97.36520318508151, "W_D_1KI": 0.14993678475459427, "J_D_1KI": 0.0023640386092740016} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_20000_8e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_20000_8e-05.output deleted file mode 100644 index a5c47fb..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_20000_8e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 20000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31998, "MATRIX_DENSITY": 7.9995e-05, "TIME_S": 0.16555237770080566} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 31992, 31996, 31998]), - col_indices=tensor([13174, 16154, 19104, ..., 17316, 14628, 14714]), - values=tensor([ 0.2961, -0.6988, 1.4292, ..., 0.9249, -0.4549, - 0.1182]), size=(20000, 20000), nnz=31998, - layout=torch.sparse_csr) -tensor([0.8123, 0.8879, 0.3353, ..., 0.0309, 0.7117, 0.9836]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31998 -Density: 7.9995e-05 -Time: 0.16555237770080566 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 63424 -ss 20000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31997, "MATRIX_DENSITY": 7.99925e-05, "TIME_S": 10.19437575340271} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 31993, 31995, 31997]), - col_indices=tensor([ 1951, 4400, 13355, ..., 16423, 6899, 14719]), - values=tensor([-0.3339, 0.8334, 0.7225, ..., -0.9410, 1.7196, - -1.1716]), size=(20000, 20000), nnz=31997, - layout=torch.sparse_csr) -tensor([0.5883, 0.4106, 0.3171, ..., 0.8266, 0.4259, 0.6836]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31997 -Density: 7.99925e-05 -Time: 10.19437575340271 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 31993, 31995, 31997]), - col_indices=tensor([ 1951, 4400, 13355, ..., 16423, 6899, 14719]), - values=tensor([-0.3339, 0.8334, 0.7225, ..., -0.9410, 1.7196, - -1.1716]), size=(20000, 20000), nnz=31997, - layout=torch.sparse_csr) -tensor([0.5883, 0.4106, 0.3171, ..., 0.8266, 0.4259, 0.6836]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31997 -Density: 7.99925e-05 -Time: 10.19437575340271 seconds - -[20.2, 20.24, 20.44, 20.56, 20.32, 20.24, 20.24, 20.68, 20.48, 20.48] -[20.76, 20.36, 20.52, 21.4, 23.0, 23.92, 24.8, 24.76, 25.0, 24.2, 24.12, 24.6, 24.48] -10.238632440567017 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 63424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 31997, 'MATRIX_DENSITY': 7.99925e-05, 'TIME_S': 10.19437575340271, 'TIME_S_1KI': 0.1607337246689378, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 285.14172214508056, 'W': 27.849590636275384} -[20.2, 20.24, 20.44, 20.56, 20.32, 20.24, 20.24, 20.68, 20.48, 20.48, 20.2, 20.36, 20.36, 20.44, 20.44, 20.16, 20.28, 20.24, 20.52, 20.72] -366.79999999999995 -18.339999999999996 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 63424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 31997, 'MATRIX_DENSITY': 7.99925e-05, 'TIME_S': 10.19437575340271, 'TIME_S_1KI': 0.1607337246689378, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 285.14172214508056, 'W': 27.849590636275384, 'J_1KI': 4.495801623125009, 'W_1KI': 0.43910176961836817, 'W_D': 9.509590636275387, 'J_D': 97.36520318508151, 'W_D_1KI': 0.14993678475459427, 'J_D_1KI': 0.0023640386092740016} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_0.0001.json b/pytorch/output_1core_after_test/altra_10_10_10_50000_0.0001.json deleted file mode 100644 index b21da97..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 16114, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249986, "MATRIX_DENSITY": 9.99944e-05, "TIME_S": 10.773088455200195, "TIME_S_1KI": 0.6685545770882584, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 356.3190417861938, "W": 34.1705954897688, "J_1KI": 22.112389337606665, "W_1KI": 2.120553276018915, "W_D": 15.8825954897688, "J_D": 165.6181616058349, "W_D_1KI": 0.9856395364136031, "J_D_1KI": 0.06116665858344317} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_0.0001.output b/pytorch/output_1core_after_test/altra_10_10_10_50000_0.0001.output deleted file mode 100644 index 9d0a3ab..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_0.0001.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249990, "MATRIX_DENSITY": 
9.9996e-05, "TIME_S": 0.6515696048736572} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 12, ..., 249983, 249988, - 249990]), - col_indices=tensor([ 6662, 8889, 16052, ..., 41480, 19736, 47943]), - values=tensor([ 0.7313, 1.1544, -0.5654, ..., 0.5067, 2.7032, - 1.2092]), size=(50000, 50000), nnz=249990, - layout=torch.sparse_csr) -tensor([0.0703, 0.6351, 0.6923, ..., 0.0380, 0.6908, 0.4954]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249990 -Density: 9.9996e-05 -Time: 0.6515696048736572 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 16114 -ss 50000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249986, "MATRIX_DENSITY": 9.99944e-05, "TIME_S": 10.773088455200195} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 249975, 249982, - 249986]), - col_indices=tensor([ 9416, 19517, 30063, ..., 32154, 36782, 41226]), - values=tensor([ 0.8423, -0.3244, 1.0233, ..., 0.0855, 0.0139, - 0.1714]), size=(50000, 50000), nnz=249986, - layout=torch.sparse_csr) -tensor([0.3530, 0.4524, 0.0808, ..., 0.2188, 0.9274, 0.8850]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249986 -Density: 9.99944e-05 -Time: 10.773088455200195 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 249975, 249982, - 249986]), - col_indices=tensor([ 9416, 19517, 30063, ..., 32154, 36782, 41226]), - values=tensor([ 0.8423, -0.3244, 1.0233, ..., 0.0855, 0.0139, - 0.1714]), size=(50000, 50000), nnz=249986, - layout=torch.sparse_csr) -tensor([0.3530, 0.4524, 0.0808, ..., 0.2188, 0.9274, 0.8850]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249986 -Density: 9.99944e-05 -Time: 10.773088455200195 seconds - -[20.04, 20.08, 20.2, 20.16, 20.16, 20.24, 20.44, 20.8, 20.76, 20.8] -[20.8, 20.72, 20.76, 24.72, 26.88, 29.84, 32.2, 31.64, 32.56, 32.28, 32.28, 32.28, 32.08] -10.427650928497314 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 16114, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 249986, 'MATRIX_DENSITY': 9.99944e-05, 'TIME_S': 10.773088455200195, 'TIME_S_1KI': 0.6685545770882584, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 356.3190417861938, 'W': 34.1705954897688} -[20.04, 20.08, 20.2, 20.16, 20.16, 20.24, 20.44, 20.8, 20.76, 20.8, 20.12, 20.08, 20.28, 20.56, 20.48, 20.52, 20.28, 20.16, 20.04, 20.08] -365.76 -18.288 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 16114, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 249986, 'MATRIX_DENSITY': 9.99944e-05, 'TIME_S': 10.773088455200195, 'TIME_S_1KI': 0.6685545770882584, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 356.3190417861938, 'W': 34.1705954897688, 'J_1KI': 22.112389337606665, 'W_1KI': 2.120553276018915, 'W_D': 15.8825954897688, 'J_D': 165.6181616058349, 'W_D_1KI': 0.9856395364136031, 'J_D_1KI': 0.06116665858344317} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_1e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_50000_1e-05.json deleted file mode 100644 index 7aca453..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 39558, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.424606084823608, "TIME_S_1KI": 0.26352712687253166, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 324.11351709365846, "W": 30.726844173746105, "J_1KI": 8.193374717975086, "W_1KI": 0.7767542386810785, "W_D": 12.161844173746108, "J_D": 128.28580986738208, "W_D_1KI": 0.30744335339870843, "J_D_1KI": 0.007771964037583004} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_1e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_50000_1e-05.output deleted file mode 100644 index be91dba..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_1e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 
0.26543164253234863} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 24999, 24999, 25000]), - col_indices=tensor([22098, 23271, 16509, ..., 49035, 19856, 29710]), - values=tensor([-1.0630, -0.7063, -0.4487, ..., 0.5192, -0.7952, - -0.0211]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.5959, 0.4899, 0.5718, ..., 0.0559, 0.3906, 0.5621]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 0.26543164253234863 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 39558 -ss 50000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.424606084823608} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), - col_indices=tensor([48058, 11500, 28092, ..., 28559, 6217, 24317]), - values=tensor([-0.1678, 1.6669, -1.7696, ..., 1.2303, 1.8947, - -1.1526]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.2915, 0.7675, 0.3440, ..., 0.8675, 0.1613, 0.7154]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.424606084823608 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), - col_indices=tensor([48058, 11500, 28092, ..., 28559, 6217, 24317]), - values=tensor([-0.1678, 1.6669, -1.7696, ..., 1.2303, 1.8947, - -1.1526]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.2915, 0.7675, 0.3440, ..., 0.8675, 0.1613, 0.7154]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.424606084823608 seconds - -[20.36, 20.32, 20.28, 20.52, 20.52, 20.64, 20.8, 20.64, 20.64, 20.64] -[20.64, 20.64, 20.28, 23.88, 25.6, 27.36, 28.68, 29.84, 27.6, 27.0, 27.08, 27.28, 27.24] -10.548220157623291 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 39558, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.424606084823608, 'TIME_S_1KI': 0.26352712687253166, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.11351709365846, 'W': 30.726844173746105} -[20.36, 20.32, 20.28, 20.52, 20.52, 20.64, 20.8, 20.64, 20.64, 20.64, 20.64, 20.84, 21.04, 20.96, 20.92, 20.76, 20.76, 20.56, 20.16, 20.24] -371.29999999999995 -18.564999999999998 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 39558, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.424606084823608, 'TIME_S_1KI': 0.26352712687253166, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.11351709365846, 'W': 30.726844173746105, 'J_1KI': 8.193374717975086, 'W_1KI': 0.7767542386810785, 'W_D': 12.161844173746108, 'J_D': 128.28580986738208, 'W_D_1KI': 0.30744335339870843, 'J_D_1KI': 0.007771964037583004} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_2e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_50000_2e-05.json deleted file mode 100644 index 5c88497..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 29180, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 50000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.342068433761597, "TIME_S_1KI": 0.35442318141746393, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 314.51990249633786, "W": 30.360391076955718, "J_1KI": 10.778612148606507, "W_1KI": 1.040452058840155, "W_D": 11.935391076955717, "J_D": 123.64524647474285, "W_D_1KI": 0.4090264248442672, "J_D_1KI": 0.014017355203710322} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_2e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_50000_2e-05.output deleted file mode 100644 index 9955434..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_2e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 49998, "MATRIX_DENSITY": 1.99992e-05, 
"TIME_S": 0.3598310947418213} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 49996, 49996, 49998]), - col_indices=tensor([30529, 7062, 29506, ..., 35953, 37426, 43003]), - values=tensor([ 0.6020, 0.0624, 0.2604, ..., 1.2885, -2.2140, - 1.3375]), size=(50000, 50000), nnz=49998, - layout=torch.sparse_csr) -tensor([0.3283, 0.8413, 0.6070, ..., 0.6287, 0.3886, 0.1587]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 49998 -Density: 1.99992e-05 -Time: 0.3598310947418213 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 29180 -ss 50000 -sd 2e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 50000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.342068433761597} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 49998, 49999, 50000]), - col_indices=tensor([24033, 20967, 34679, ..., 22694, 884, 7980]), - values=tensor([-0.8208, 1.0839, 0.5317, ..., -1.4749, 0.0532, - -0.4907]), size=(50000, 50000), nnz=50000, - layout=torch.sparse_csr) -tensor([0.2365, 0.0619, 0.0494, ..., 0.8664, 0.4569, 0.5629]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 50000 -Density: 2e-05 -Time: 10.342068433761597 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 49998, 49999, 50000]), - col_indices=tensor([24033, 20967, 34679, ..., 22694, 884, 7980]), - values=tensor([-0.8208, 1.0839, 0.5317, ..., -1.4749, 0.0532, - -0.4907]), size=(50000, 50000), nnz=50000, - layout=torch.sparse_csr) -tensor([0.2365, 0.0619, 0.0494, ..., 0.8664, 0.4569, 0.5629]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 50000 -Density: 2e-05 -Time: 10.342068433761597 seconds - -[20.28, 20.36, 20.08, 20.16, 20.16, 20.08, 20.24, 20.08, 20.2, 20.24] -[20.24, 20.44, 20.36, 21.44, 22.44, 25.12, 26.56, 27.96, 28.88, 28.88, 29.08, 28.76, 28.48] -10.359547138214111 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 29180, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 50000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.342068433761597, 'TIME_S_1KI': 0.35442318141746393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.51990249633786, 'W': 30.360391076955718} -[20.28, 20.36, 20.08, 20.16, 20.16, 20.08, 20.24, 20.08, 20.2, 20.24, 20.4, 20.36, 20.44, 20.84, 20.92, 21.04, 21.04, 21.0, 20.68, 20.72] -368.5 -18.425 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 29180, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 50000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.342068433761597, 'TIME_S_1KI': 0.35442318141746393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.51990249633786, 'W': 30.360391076955718, 'J_1KI': 10.778612148606507, 'W_1KI': 1.040452058840155, 'W_D': 11.935391076955717, 'J_D': 123.64524647474285, 'W_D_1KI': 0.4090264248442672, 'J_D_1KI': 0.014017355203710322} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_5e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_50000_5e-05.json deleted file mode 100644 index ecd2363..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 21246, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124997, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.39963436126709, "TIME_S_1KI": 0.4894866968496229, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 348.6815179252624, "W": 32.90522736665826, "J_1KI": 16.41163126825108, "W_1KI": 1.5487728215503274, "W_D": 14.517227366658261, "J_D": 153.83236279964444, "W_D_1KI": 0.6832922605035424, "J_D_1KI": 0.03216098373828214} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_5e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_50000_5e-05.output deleted file mode 100644 index f138783..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_5e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124997, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 
0.49421024322509766} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 124993, 124996, - 124997]), - col_indices=tensor([ 1273, 22428, 29987, ..., 14261, 20854, 19550]), - values=tensor([ 1.3964, 1.1880, -2.4586, ..., 0.2900, -1.6227, - 1.1179]), size=(50000, 50000), nnz=124997, - layout=torch.sparse_csr) -tensor([0.8998, 0.8412, 0.9493, ..., 0.6279, 0.1643, 0.8336]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124997 -Density: 4.99988e-05 -Time: 0.49421024322509766 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 21246 -ss 50000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124997, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.39963436126709} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 124992, 124995, - 124997]), - col_indices=tensor([33843, 5335, 28219, ..., 48135, 16744, 44054]), - values=tensor([ 0.6356, -0.4709, 1.5854, ..., -0.0049, -0.5240, - -0.7657]), size=(50000, 50000), nnz=124997, - layout=torch.sparse_csr) -tensor([0.3855, 0.1623, 0.0527, ..., 0.6589, 0.5383, 0.0901]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124997 -Density: 4.99988e-05 -Time: 10.39963436126709 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 124992, 124995, - 124997]), - col_indices=tensor([33843, 5335, 28219, ..., 48135, 16744, 44054]), - values=tensor([ 0.6356, -0.4709, 1.5854, ..., -0.0049, -0.5240, - -0.7657]), size=(50000, 50000), nnz=124997, - layout=torch.sparse_csr) -tensor([0.3855, 0.1623, 0.0527, ..., 0.6589, 0.5383, 0.0901]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124997 -Density: 4.99988e-05 -Time: 10.39963436126709 seconds - -[20.44, 20.6, 20.44, 20.56, 20.72, 20.48, 20.56, 20.8, 20.88, 20.88] -[20.8, 20.76, 20.96, 21.92, 24.04, 27.24, 29.92, 31.4, 31.72, 31.72, 31.76, 31.84, 31.92] -10.596538782119751 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 21246, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 124997, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.39963436126709, 'TIME_S_1KI': 0.4894866968496229, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.6815179252624, 'W': 32.90522736665826} -[20.44, 20.6, 20.44, 20.56, 20.72, 20.48, 20.56, 20.8, 20.88, 20.88, 20.28, 20.32, 20.28, 20.4, 20.16, 20.16, 20.24, 20.08, 20.12, 20.32] -367.76 -18.387999999999998 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 21246, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 124997, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.39963436126709, 'TIME_S_1KI': 0.4894866968496229, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.6815179252624, 'W': 32.90522736665826, 'J_1KI': 16.41163126825108, 'W_1KI': 1.5487728215503274, 'W_D': 14.517227366658261, 'J_D': 153.83236279964444, 'W_D_1KI': 0.6832922605035424, 'J_D_1KI': 0.03216098373828214} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_8e-05.json b/pytorch/output_1core_after_test/altra_10_10_10_50000_8e-05.json deleted file mode 100644 index effc6da..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 17928, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 7.99972e-05, "TIME_S": 10.449234962463379, "TIME_S_1KI": 0.5828444311949675, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 345.35830185890194, "W": 33.32021277823694, "J_1KI": 19.263626832825857, "W_1KI": 1.858557160767344, "W_D": 14.958212778236941, "J_D": 155.03931497430793, "W_D_1KI": 0.8343492178847022, "J_D_1KI": 0.0465388898864738} diff --git a/pytorch/output_1core_after_test/altra_10_10_10_50000_8e-05.output b/pytorch/output_1core_after_test/altra_10_10_10_50000_8e-05.output deleted file mode 100644 index 969a67b..0000000 --- a/pytorch/output_1core_after_test/altra_10_10_10_50000_8e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 
7.99972e-05, "TIME_S": 0.5856485366821289} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 13, ..., 199985, 199988, - 199993]), - col_indices=tensor([ 4116, 20821, 23313, ..., 36221, 39671, 48300]), - values=tensor([-1.1656, 0.6488, 0.3884, ..., 0.8608, -1.0532, - -1.6884]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.0024, 0.6993, 0.1691, ..., 0.8154, 0.5901, 0.4003]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 0.5856485366821289 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17928 -ss 50000 -sd 8e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 7.99972e-05, "TIME_S": 10.449234962463379} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 199987, 199991, - 199993]), - col_indices=tensor([17990, 18143, 26452, ..., 25515, 3657, 45119]), - values=tensor([-0.8402, -1.2988, 1.1344, ..., -1.1042, 0.4643, - 1.1586]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.2314, 0.4382, 0.4620, ..., 0.3725, 0.7017, 0.5878]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 10.449234962463379 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 199987, 199991, - 199993]), - col_indices=tensor([17990, 18143, 26452, ..., 25515, 3657, 45119]), - values=tensor([-0.8402, -1.2988, 1.1344, ..., -1.1042, 0.4643, - 1.1586]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.2314, 0.4382, 0.4620, ..., 0.3725, 0.7017, 0.5878]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 10.449234962463379 seconds - -[20.36, 20.36, 20.16, 20.32, 20.4, 20.52, 20.64, 20.64, 20.6, 20.56] -[20.44, 20.44, 20.36, 23.72, 25.8, 28.2, 31.08, 30.32, 31.88, 31.92, 31.76, 31.88, 32.12] -10.364828824996948 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17928, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 199993, 'MATRIX_DENSITY': 7.99972e-05, 'TIME_S': 10.449234962463379, 'TIME_S_1KI': 0.5828444311949675, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.35830185890194, 'W': 33.32021277823694} -[20.36, 20.36, 20.16, 20.32, 20.4, 20.52, 20.64, 20.64, 20.6, 20.56, 20.76, 20.64, 20.32, 20.32, 20.08, 20.4, 20.32, 20.28, 20.36, 20.08] -367.24 -18.362000000000002 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17928, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 199993, 'MATRIX_DENSITY': 7.99972e-05, 'TIME_S': 10.449234962463379, 'TIME_S_1KI': 0.5828444311949675, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.35830185890194, 'W': 33.32021277823694, 'J_1KI': 19.263626832825857, 'W_1KI': 1.858557160767344, 'W_D': 14.958212778236941, 'J_D': 155.03931497430793, 'W_D_1KI': 0.8343492178847022, 'J_D_1KI': 0.0465388898864738} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_0.0001.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_0.0001.json deleted file mode 100644 index 6b53bff..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 7321, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999958, "MATRIX_DENSITY": 9.99958e-05, "TIME_S": 10.493394613265991, "TIME_S_1KI": 1.4333280444291752, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 693.8586197495459, "W": 66.35, "J_1KI": 94.77648132079578, "W_1KI": 9.06296953968037, "W_D": 31.555249999999994, "J_D": 329.9906889352202, "W_D_1KI": 4.310237672449118, "J_D_1KI": 0.5887498528137027} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_0.0001.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_0.0001.output deleted file mode 100644 index 297dd37..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_0.0001.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, 
"MATRIX_NNZ": 999955, "MATRIX_DENSITY": 9.99955e-05, "TIME_S": 1.434152603149414} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 23, ..., 999937, 999947, - 999955]), - col_indices=tensor([17714, 27606, 40423, ..., 56745, 68426, 94681]), - values=tensor([-0.0848, -2.5543, -0.9845, ..., 1.0991, -1.2721, - 0.0094]), size=(100000, 100000), nnz=999955, - layout=torch.sparse_csr) -tensor([0.0317, 0.5212, 0.6740, ..., 0.1470, 0.6060, 0.4229]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999955 -Density: 9.99955e-05 -Time: 1.434152603149414 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7321', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999958, "MATRIX_DENSITY": 9.99958e-05, "TIME_S": 10.493394613265991} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 17, ..., 999929, 999946, - 999958]), - col_indices=tensor([ 3686, 11174, 36004, ..., 72478, 81947, 88062]), - values=tensor([ 1.2821, -0.7142, -0.0602, ..., 0.1059, 0.3571, - 1.8677]), size=(100000, 100000), nnz=999958, - layout=torch.sparse_csr) -tensor([0.8470, 0.5279, 0.2762, ..., 0.6136, 0.0054, 0.0656]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999958 -Density: 9.99958e-05 -Time: 10.493394613265991 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 17, ..., 999929, 999946, - 999958]), - col_indices=tensor([ 3686, 11174, 36004, ..., 72478, 81947, 88062]), - values=tensor([ 1.2821, -0.7142, -0.0602, ..., 0.1059, 0.3571, - 1.8677]), size=(100000, 100000), nnz=999958, - layout=torch.sparse_csr) -tensor([0.8470, 0.5279, 0.2762, ..., 0.6136, 0.0054, 0.0656]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999958 -Density: 9.99958e-05 -Time: 10.493394613265991 seconds - -[39.01, 38.31, 38.71, 38.24, 38.61, 38.69, 38.97, 38.52, 39.54, 38.43] -[66.35] -10.457552671432495 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7321, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 999958, 'MATRIX_DENSITY': 9.99958e-05, 'TIME_S': 10.493394613265991, 'TIME_S_1KI': 1.4333280444291752, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 693.8586197495459, 'W': 66.35} -[39.01, 38.31, 38.71, 38.24, 38.61, 38.69, 38.97, 38.52, 39.54, 38.43, 39.14, 38.72, 38.58, 38.9, 38.67, 38.68, 38.47, 38.41, 38.35, 38.47] -695.895 -34.79475 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7321, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 999958, 'MATRIX_DENSITY': 9.99958e-05, 'TIME_S': 10.493394613265991, 'TIME_S_1KI': 1.4333280444291752, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 693.8586197495459, 'W': 66.35, 'J_1KI': 94.77648132079578, 'W_1KI': 9.06296953968037, 'W_D': 31.555249999999994, 'J_D': 329.9906889352202, 'W_D_1KI': 4.310237672449118, 'J_D_1KI': 0.5887498528137027} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_1e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_1e-05.json deleted file mode 100644 index f548cbc..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 15459, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.295618772506714, "TIME_S_1KI": 0.6659951337412972, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 663.4292171406746, "W": 64.51, "J_1KI": 42.91540313996213, "W_1KI": 4.172973672294457, "W_D": 29.505250000000004, "J_D": 303.43582249325516, "W_D_1KI": 1.9086131056342588, "J_D_1KI": 0.12346290870264952} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_1e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_1e-05.output deleted file mode 100644 index eafacaf..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_1e-05.output +++ /dev/null @@ -1,67 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 99999, "MATRIX_DENSITY": 9.9999e-06, "TIME_S": 0.6791770458221436} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99996, 99999]), - col_indices=tensor([48006, 64298, 50858, ..., 16925, 31708, 60124]), - values=tensor([ 0.5949, -1.1126, -0.4425, ..., 1.9222, -0.2766, - -0.1611]), size=(100000, 100000), nnz=99999, - layout=torch.sparse_csr) -tensor([0.6661, 0.7299, 0.6911, ..., 0.4623, 0.9962, 0.3767]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 99999 -Density: 9.9999e-06 -Time: 0.6791770458221436 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15459', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.295618772506714} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 99998, 100000, - 100000]), - col_indices=tensor([31661, 76136, 71092, ..., 68291, 34176, 79322]), - values=tensor([-1.4568, -0.5642, -0.1260, ..., -2.0915, -0.5754, - -0.9900]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.6003, 0.7344, 0.3335, ..., 0.5656, 0.2704, 0.5992]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 10.295618772506714 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 99998, 100000, - 100000]), - col_indices=tensor([31661, 76136, 71092, ..., 68291, 34176, 79322]), - values=tensor([-1.4568, -0.5642, -0.1260, ..., -2.0915, -0.5754, - -0.9900]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.6003, 0.7344, 0.3335, ..., 0.5656, 0.2704, 0.5992]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 10.295618772506714 seconds - -[39.16, 38.35, 38.36, 38.49, 38.33, 38.35, 38.75, 38.82, 38.39, 39.78] -[64.51] -10.284129858016968 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15459, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.295618772506714, 'TIME_S_1KI': 0.6659951337412972, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.4292171406746, 'W': 64.51} -[39.16, 38.35, 38.36, 38.49, 38.33, 38.35, 38.75, 38.82, 38.39, 39.78, 39.65, 38.37, 38.32, 38.26, 38.42, 38.71, 38.38, 44.4, 38.96, 38.28] -700.095 -35.00475 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15459, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.295618772506714, 'TIME_S_1KI': 0.6659951337412972, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.4292171406746, 'W': 64.51, 'J_1KI': 42.91540313996213, 'W_1KI': 4.172973672294457, 'W_D': 29.505250000000004, 'J_D': 303.43582249325516, 'W_D_1KI': 1.9086131056342588, 'J_D_1KI': 0.12346290870264952} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_2e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_2e-05.json deleted file mode 100644 index 8ca2df0..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 12799, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199999, "MATRIX_DENSITY": 1.99999e-05, "TIME_S": 10.312466621398926, "TIME_S_1KI": 0.8057244020156986, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 665.1102061700822, "W": 64.37, "J_1KI": 51.965794684747415, "W_1KI": 5.029299163997187, "W_D": 29.493500000000004, "J_D": 304.74487906908996, "W_D_1KI": 2.3043597156027817, "J_D_1KI": 0.1800421685758873} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_2e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_2e-05.output deleted file mode 100644 index 30f0afb..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_2e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199996, "MATRIX_DENSITY": 1.99996e-05, "TIME_S": 0.8203706741333008} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 199991, 199992, - 199996]), - col_indices=tensor([51426, 90007, 40378, ..., 18735, 37776, 48454]), - values=tensor([ 0.7391, 0.9740, -1.1861, ..., -0.5652, -0.6436, - 1.0422]), size=(100000, 100000), nnz=199996, - layout=torch.sparse_csr) -tensor([0.7835, 0.3777, 0.7585, ..., 0.8549, 0.3936, 0.4815]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199996 -Density: 1.99996e-05 -Time: 0.8203706741333008 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12799', '-ss', '100000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199999, "MATRIX_DENSITY": 1.99999e-05, "TIME_S": 10.312466621398926} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 199994, 199998, - 199999]), - col_indices=tensor([27560, 28667, 53651, ..., 54900, 68740, 23475]), - values=tensor([-1.6088, -0.0747, 0.2674, ..., 0.3290, 0.5072, - 1.0750]), size=(100000, 100000), nnz=199999, - layout=torch.sparse_csr) -tensor([0.6315, 0.0366, 0.8205, ..., 0.3363, 0.5692, 0.3406]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199999 -Density: 1.99999e-05 -Time: 10.312466621398926 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 199994, 199998, - 199999]), - col_indices=tensor([27560, 28667, 53651, ..., 54900, 68740, 23475]), - values=tensor([-1.6088, -0.0747, 0.2674, ..., 0.3290, 0.5072, - 1.0750]), size=(100000, 100000), nnz=199999, - layout=torch.sparse_csr) -tensor([0.6315, 0.0366, 0.8205, ..., 0.3363, 0.5692, 0.3406]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199999 -Density: 1.99999e-05 -Time: 10.312466621398926 seconds - -[39.81, 39.25, 38.39, 38.68, 38.87, 38.32, 38.84, 38.77, 38.55, 38.33] -[64.37] -10.332611560821533 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12799, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 199999, 'MATRIX_DENSITY': 1.99999e-05, 'TIME_S': 10.312466621398926, 'TIME_S_1KI': 0.8057244020156986, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 665.1102061700822, 'W': 64.37} -[39.81, 39.25, 38.39, 38.68, 38.87, 38.32, 38.84, 38.77, 38.55, 38.33, 40.43, 38.95, 38.61, 38.52, 38.38, 39.43, 38.59, 38.26, 38.46, 38.75] -697.53 -34.8765 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12799, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 199999, 'MATRIX_DENSITY': 1.99999e-05, 'TIME_S': 10.312466621398926, 'TIME_S_1KI': 0.8057244020156986, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 665.1102061700822, 'W': 64.37, 'J_1KI': 51.965794684747415, 'W_1KI': 5.029299163997187, 'W_D': 29.493500000000004, 'J_D': 304.74487906908996, 'W_D_1KI': 2.3043597156027817, 'J_D_1KI': 0.1800421685758873} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_5e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_5e-05.json deleted file mode 100644 index 69fd6a2..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 9599, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499988, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.298398733139038, "TIME_S_1KI": 1.0728616244545304, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.3587252640724, "W": 65.39, "J_1KI": 70.14884105261719, "W_1KI": 6.812167934159809, "W_D": 30.4705, "J_D": 313.7723969744444, "W_D_1KI": 3.1743410771955416, "J_D_1KI": 0.33069497626789685} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_5e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_5e-05.output deleted file mode 100644 index ae56d6f..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_5e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499987, "MATRIX_DENSITY": 4.99987e-05, "TIME_S": 1.0938327312469482} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 10, ..., 499975, 499979, - 499987]), - col_indices=tensor([ 625, 1232, 18696, ..., 77518, 94690, 99471]), - values=tensor([-1.1636, 2.1655, -1.0596, ..., -1.6108, 0.9892, - -1.1686]), size=(100000, 100000), nnz=499987, - layout=torch.sparse_csr) -tensor([0.2570, 0.8095, 0.4051, ..., 0.4677, 0.3527, 0.8430]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499987 -Density: 4.99987e-05 -Time: 1.0938327312469482 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '9599', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499988, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.298398733139038} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 499972, 499977, - 499988]), - col_indices=tensor([37027, 6807, 46560, ..., 75712, 82456, 83079]), - values=tensor([ 1.4255, -0.1200, -0.1371, ..., 0.2939, 0.4596, - 1.2418]), size=(100000, 100000), nnz=499988, - layout=torch.sparse_csr) -tensor([0.5521, 0.1482, 0.5901, ..., 0.2982, 0.5753, 0.2296]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499988 -Density: 4.99988e-05 -Time: 10.298398733139038 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 499972, 499977, - 499988]), - col_indices=tensor([37027, 6807, 46560, ..., 75712, 82456, 83079]), - values=tensor([ 1.4255, -0.1200, -0.1371, ..., 0.2939, 0.4596, - 1.2418]), size=(100000, 100000), nnz=499988, - layout=torch.sparse_csr) -tensor([0.5521, 0.1482, 0.5901, ..., 0.2982, 0.5753, 0.2296]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499988 -Density: 4.99988e-05 -Time: 10.298398733139038 seconds - -[40.42, 38.28, 38.67, 38.64, 38.43, 39.44, 38.38, 38.76, 39.56, 38.41] -[65.39] -10.297579526901245 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 9599, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 499988, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.298398733139038, 'TIME_S_1KI': 1.0728616244545304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.3587252640724, 'W': 65.39} -[40.42, 38.28, 38.67, 38.64, 38.43, 39.44, 38.38, 38.76, 39.56, 38.41, 39.54, 38.71, 39.2, 38.37, 38.42, 39.64, 38.35, 38.47, 38.46, 38.85] -698.39 -34.9195 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 9599, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 499988, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.298398733139038, 'TIME_S_1KI': 1.0728616244545304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.3587252640724, 'W': 65.39, 'J_1KI': 70.14884105261719, 'W_1KI': 6.812167934159809, 'W_D': 30.4705, 'J_D': 313.7723969744444, 'W_D_1KI': 3.1743410771955416, 'J_D_1KI': 0.33069497626789685} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_8e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_8e-05.json deleted file mode 100644 index 1c70fda..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 7573, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799974, "MATRIX_DENSITY": 7.99974e-05, "TIME_S": 10.329928636550903, "TIME_S_1KI": 1.364047093166632, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 680.7610593819618, "W": 65.53, "J_1KI": 89.89318095628704, "W_1KI": 8.653109731942427, "W_D": 30.261250000000004, "J_D": 314.3702213981748, "W_D_1KI": 3.9959395219860037, "J_D_1KI": 0.5276560837166253} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_8e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_8e-05.output deleted file mode 100644 index 66178b5..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_100000_8e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799979, "MATRIX_DENSITY": 7.99979e-05, "TIME_S": 1.3864548206329346} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 22, ..., 799960, 799969, - 799979]), - col_indices=tensor([ 3991, 12470, 47738, ..., 59230, 62610, 86559]), - values=tensor([-1.5517, -1.1019, -2.2061, ..., 0.0714, 0.2519, - -0.0928]), size=(100000, 100000), nnz=799979, - layout=torch.sparse_csr) -tensor([0.8557, 0.9882, 0.2106, ..., 0.6867, 0.1131, 0.9591]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799979 -Density: 7.99979e-05 -Time: 1.3864548206329346 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7573', '-ss', '100000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799974, "MATRIX_DENSITY": 7.99974e-05, "TIME_S": 10.329928636550903} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 18, ..., 799958, 799967, - 799974]), - col_indices=tensor([22158, 27819, 31162, ..., 83457, 91150, 93673]), - values=tensor([ 1.7487, -0.8213, -0.1355, ..., -0.8810, -2.4345, - 0.2948]), size=(100000, 100000), nnz=799974, - layout=torch.sparse_csr) -tensor([0.3306, 0.6421, 0.3776, ..., 0.4090, 0.4110, 0.5706]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799974 -Density: 7.99974e-05 -Time: 10.329928636550903 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 18, ..., 799958, 799967, - 799974]), - col_indices=tensor([22158, 27819, 31162, ..., 83457, 91150, 93673]), - values=tensor([ 1.7487, -0.8213, -0.1355, ..., -0.8810, -2.4345, - 0.2948]), size=(100000, 100000), nnz=799974, - layout=torch.sparse_csr) -tensor([0.3306, 0.6421, 0.3776, ..., 0.4090, 0.4110, 0.5706]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799974 -Density: 7.99974e-05 -Time: 10.329928636550903 seconds - -[39.09, 38.34, 38.83, 38.27, 38.53, 38.34, 38.66, 38.78, 39.1, 44.17] -[65.53] -10.388540506362915 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7573, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 799974, 'MATRIX_DENSITY': 7.99974e-05, 'TIME_S': 10.329928636550903, 'TIME_S_1KI': 1.364047093166632, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 680.7610593819618, 'W': 65.53} -[39.09, 38.34, 38.83, 38.27, 38.53, 38.34, 38.66, 38.78, 39.1, 44.17, 39.27, 38.54, 38.41, 38.25, 38.45, 40.7, 44.33, 38.48, 38.64, 38.92] -705.375 -35.26875 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7573, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 799974, 'MATRIX_DENSITY': 7.99974e-05, 'TIME_S': 10.329928636550903, 'TIME_S_1KI': 1.364047093166632, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 680.7610593819618, 'W': 65.53, 'J_1KI': 89.89318095628704, 'W_1KI': 8.653109731942427, 'W_D': 30.261250000000004, 'J_D': 314.3702213981748, 'W_D_1KI': 3.9959395219860037, 'J_D_1KI': 0.5276560837166253} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_0.0001.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_0.0001.json deleted file mode 100644 index c7d4c4b..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 364192, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 10.286681890487671, "TIME_S_1KI": 0.028245216508016844, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 658.1757550382615, "W": 65.18, "J_1KI": 1.8072218913053046, "W_1KI": 0.17897153149986822, "W_D": 29.735250000000008, "J_D": 300.26113255602127, "W_D_1KI": 0.08164718060803094, "J_D_1KI": 0.00022418718864783123} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_0.0001.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_0.0001.output deleted file mode 100644 index 2c25191..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_0.0001.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9998, "MATRIX_DENSITY": 9.998e-05, "TIME_S": 0.03789472579956055} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 9993, 9997, 9998]), - col_indices=tensor([4827, 4877, 7090, ..., 3097, 4386, 1589]), - values=tensor([ 0.2815, -0.0621, 0.7820, ..., 0.1907, 0.2517, - -0.5782]), size=(10000, 10000), nnz=9998, - layout=torch.sparse_csr) -tensor([0.7159, 0.5102, 0.1780, ..., 0.6649, 0.0132, 0.6435]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9998 -Density: 9.998e-05 -Time: 0.03789472579956055 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '277083', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 7.98856258392334} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 9998, 9998, 9999]), - col_indices=tensor([4687, 6305, 8321, ..., 5297, 8865, 3125]), - values=tensor([-0.2973, -0.7293, -0.4701, ..., 1.0040, -0.5152, - 0.5670]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.6850, 0.3394, 0.0913, ..., 0.6251, 0.5060, 0.1073]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 7.98856258392334 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '364192', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 10.286681890487671} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9996, 9998, 9999]), - col_indices=tensor([9034, 7140, 1786, ..., 4605, 7715, 5729]), - values=tensor([-1.2303, 1.0912, 0.2060, ..., 0.0412, -0.6363, - -0.6436]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.2105, 0.8829, 0.5834, ..., 0.8176, 0.5853, 0.6953]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 10.286681890487671 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9996, 9998, 9999]), - col_indices=tensor([9034, 7140, 1786, ..., 4605, 7715, 5729]), - values=tensor([-1.2303, 1.0912, 0.2060, ..., 0.0412, -0.6363, - -0.6436]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.2105, 0.8829, 0.5834, ..., 0.8176, 0.5853, 0.6953]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 10.286681890487671 seconds - -[39.11, 38.65, 38.48, 38.91, 44.04, 38.52, 38.51, 38.72, 38.44, 38.98] -[65.18] -10.097817659378052 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 364192, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 9999, 'MATRIX_DENSITY': 9.999e-05, 'TIME_S': 10.286681890487671, 'TIME_S_1KI': 0.028245216508016844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 658.1757550382615, 'W': 65.18} -[39.11, 38.65, 38.48, 38.91, 44.04, 38.52, 38.51, 38.72, 38.44, 38.98, 40.98, 44.91, 38.6, 38.78, 38.6, 38.41, 38.68, 38.86, 38.79, 38.92] -708.895 -35.44475 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 364192, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 9999, 'MATRIX_DENSITY': 9.999e-05, 'TIME_S': 10.286681890487671, 'TIME_S_1KI': 0.028245216508016844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 658.1757550382615, 'W': 65.18, 'J_1KI': 1.8072218913053046, 'W_1KI': 0.17897153149986822, 'W_D': 29.735250000000008, 'J_D': 300.26113255602127, 'W_D_1KI': 0.08164718060803094, 'J_D_1KI': 0.00022418718864783123} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_1e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_1e-05.json deleted file mode 100644 index a9610ea..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 687353, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.203821897506714, "TIME_S_1KI": 0.016299953440963688, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 712.5261029052734, "W": 65.11, "J_1KI": 1.036623253125066, "W_1KI": 0.09472570862424402, "W_D": 30.116, "J_D": 329.5720490722656, "W_D_1KI": 0.04381445923710233, "J_D_1KI": 
6.374375209987056e-05} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_1e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_1e-05.output deleted file mode 100644 index fc5d954..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_1e-05.output +++ /dev/null @@ -1,1900 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.027229785919189453} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([4045, 4744, 1969, 5170, 355, 3743, 7239, 1983, 6372, - 7260, 6212, 9195, 8166, 7792, 8432, 3345, 4045, 4770, - 8082, 3211, 3950, 6785, 8723, 6669, 8215, 9969, 7524, - 1306, 4082, 484, 1263, 75, 3992, 5609, 692, 315, - 545, 81, 6122, 4527, 2787, 1344, 1257, 8492, 5972, - 3958, 5565, 388, 2181, 6347, 4665, 1454, 1871, 6577, - 5849, 6561, 2357, 2021, 7110, 9864, 3756, 2830, 2694, - 731, 2540, 2772, 4974, 4576, 4656, 4678, 563, 3696, - 6969, 3891, 8281, 9447, 9975, 6473, 8066, 5180, 5504, - 2443, 2526, 7102, 3475, 9145, 5556, 8703, 2629, 8299, - 7325, 1885, 394, 4482, 7760, 889, 1229, 7188, 2402, - 4254, 4458, 1775, 889, 1922, 4005, 7929, 6332, 5222, - 4193, 3846, 5591, 3132, 5040, 4572, 6412, 7280, 4097, - 1715, 2495, 9743, 7911, 8815, 5925, 6947, 9088, 4900, - 3759, 2945, 9825, 8381, 2146, 9403, 859, 6619, 1686, - 5885, 7277, 7388, 2624, 3, 3287, 4267, 1605, 1329, - 4223, 6537, 872, 9449, 5889, 7324, 571, 8575, 3965, - 3392, 274, 1349, 8301, 7198, 954, 8923, 2716, 1263, - 1951, 5528, 265, 1316, 6577, 2521, 6226, 1549, 2995, - 8579, 1144, 5476, 6143, 4056, 6348, 3896, 2686, 4597, - 8116, 7894, 7968, 6596, 9286, 1061, 2405, 5653, 8630, - 9717, 3850, 6550, 2567, 5764, 7744, 3083, 7257, 5570, - 874, 5960, 1875, 425, 367, 8267, 2229, 9089, 8610, - 9201, 4031, 4109, 1989, 8740, 3450, 3898, 8290, 4247, - 7004, 5659, 1284, 9200, 6028, 3729, 9962, 3344, 7221, - 6450, 4922, 9363, 2957, 694, 1036, 4078, 5321, 9460, - 6554, 8267, 7807, 9506, 418, 6779, 1032, 5923, 1178, - 8871, 7785, 7222, 2798, 5989, 4193, 669, 6941, 5165, - 2347, 2087, 7970, 4135, 7434, 6852, 2499, 4838, 8645, - 5626, 9289, 9352, 367, 6588, 1410, 2648, 5129, 6336, - 3231, 9701, 1799, 9329, 7092, 8568, 1074, 3637, 158, - 1049, 4560, 7841, 9294, 7735, 1630, 3087, 52, 4875, - 2884, 9301, 556, 7130, 3440, 7247, 7021, 540, 8272, - 367, 8348, 2062, 671, 1846, 2205, 8050, 115, 2558, - 8189, 1188, 847, 1660, 4427, 630, 7923, 3944, 2299, - 7763, 5050, 153, 8273, 6659, 2941, 8351, 979, 3432, - 9988, 3089, 127, 6065, 6027, 3320, 952, 8343, 4698, - 23, 1048, 6574, 6762, 3218, 1959, 1984, 2159, 6505, - 6078, 3574, 5128, 308, 9854, 3478, 5402, 7574, 4707, - 9515, 5662, 3917, 6496, 5188, 4426, 7200, 9100, 1396, - 2792, 1491, 3216, 9878, 1991, 6156, 4136, 6794, 8826, - 2274, 1568, 6211, 9509, 2673, 6396, 8449, 6274, 3615, - 5914, 5348, 
5596, 771, 5276, 6485, 4912, 1984, 6601, - 8580, 8870, 1395, 2756, 4474, 8853, 8875, 3481, 6187, - 8699, 1632, 817, 5245, 4293, 9975, 9216, 4080, 6123, - 7380, 7956, 7337, 9514, 7279, 9609, 3380, 204, 5188, - 5606, 3352, 7517, 1561, 2129, 8378, 5718, 2198, 3103, - 9297, 3720, 7205, 9471, 206, 3912, 9542, 8918, 3787, - 8729, 5249, 1707, 9065, 7594, 4777, 6292, 7264, 429, - 9992, 4825, 3965, 3250, 2055, 2939, 8853, 9301, 6077, - 3981, 335, 6295, 4072, 6633, 2813, 8082, 4850, 1375, - 8210, 5100, 8898, 1427, 8941, 5121, 3948, 2695, 5096, - 6999, 5887, 4806, 7974, 8400, 8099, 5128, 2963, 8775, - 1223, 6937, 5450, 9949, 5970, 6649, 5843, 5105, 1277, - 2020, 733, 6370, 2428, 6711, 5600, 2402, 4972, 5858, - 2650, 201, 3579, 3362, 8531, 6386, 4248, 1047, 7995, - 97, 2909, 470, 7551, 1034, 7004, 1915, 4105, 418, - 1046, 1682, 3207, 4664, 5323, 8921, 14, 4765, 9494, - 79, 9065, 5588, 6111, 1723, 6653, 2290, 410, 5266, - 4888, 3415, 4042, 8753, 9060, 5791, 5735, 2387, 9055, - 2180, 2986, 3516, 5246, 494, 1641, 8784, 7396, 4552, - 6920, 8809, 5163, 149, 1927, 5854, 8034, 2790, 6271, - 7514, 7906, 7117, 1894, 4349, 1097, 621, 848, 6861, - 7646, 1505, 7533, 451, 2339, 6158, 7478, 8701, 2920, - 8974, 9374, 3350, 5140, 6322, 1170, 4919, 843, 6819, - 6994, 124, 2087, 9060, 5570, 7589, 3450, 2519, 144, - 4613, 8154, 982, 692, 3598, 4047, 2949, 8906, 4908, - 6230, 7566, 6027, 7983, 6241, 7692, 3667, 8035, 5419, - 7468, 4507, 3569, 5779, 8203, 1287, 1609, 1590, 9156, - 3925, 4878, 8612, 569, 1069, 1748, 9524, 8982, 506, - 8724, 1534, 6929, 439, 2816, 4785, 809, 3404, 4218, - 185, 8163, 1021, 7725, 2898, 8570, 443, 9275, 2878, - 8760, 3323, 6860, 6270, 1543, 6227, 8927, 4407, 7188, - 1664, 4376, 9731, 6484, 7766, 1709, 7050, 8789, 788, - 8800, 11, 6697, 7615, 4838, 4366, 2545, 237, 308, - 6905, 6560, 5706, 5925, 2159, 747, 3287, 6222, 7394, - 573, 6558, 6908, 5548, 9252, 1659, 9038, 9721, 2443, - 9158, 4842, 3780, 5764, 5023, 8062, 8499, 5917, 8784, - 2886, 4453, 6397, 5639, 3291, 6280, 7553, 8620, 483, - 2823, 9618, 4508, 6697, 174, 1090, 4993, 9448, 1183, - 7511, 5139, 2451, 111, 1548, 1264, 4673, 7108, 2275, - 6749, 986, 6060, 6102, 5298, 2313, 4878, 2634, 1895, - 6663, 5275, 4324, 5105, 1990, 7264, 3652, 6972, 8415, - 7781, 692, 9639, 5968, 2859, 3549, 6773, 1297, 6640, - 8838, 5795, 7073, 2246, 5608, 5801, 1941, 8044, 5873, - 5040, 6348, 8208, 1070, 4309, 510, 9777, 5789, 5132, - 4737, 6387, 6005, 5783, 7899, 1657, 168, 3310, 82, - 378, 833, 4844, 4687, 2831, 3469, 4509, 5759, 1460, - 7704, 337, 1444, 1837, 9035, 499, 1271, 3169, 4897, - 6704, 1218, 483, 3226, 5142, 6682, 7585, 3258, 405, - 4426, 2271, 3527, 6004, 519, 6391, 4755, 8395, 4097, - 297, 5780, 7847, 9161, 6716, 7774, 9463, 3874, 6817, - 1734, 5402, 6421, 7418, 8084, 8718, 8572, 9441, 6395, - 5191, 1072, 595, 4066, 3784, 8733, 488, 7706, 9174, - 3241, 6201, 4514, 4851, 7584, 2765, 5225, 6075, 2028, - 7439, 5795, 3376, 1181, 7612, 8003, 8223, 7531, 3871, - 4085, 9460, 7515, 7202, 5975, 6696, 7212, 9912, 1194, - 8838, 6681, 1165, 937, 1732, 352, 6235, 1506, 1955, - 7469, 5042, 5077, 4244, 9902, 9586, 9988, 165, 829, - 1326, 8925, 2884, 376, 6650, 3208, 4641, 9040, 9533, - 7990, 1805, 5682, 8878, 2205, 5142, 5004, 5499, 6541, - 2888, 2639, 8210, 1092, 7456, 1106, 5453, 9983, 4488, - 1635, 3122, 8614, 699, 7214, 7857, 6907, 6029, 8950, - 5353, 3209, 805, 3040, 6561, 9350, 9479, 8389, 1545, - 4603, 3157, 8220, 7534, 7114, 2493, 4229, 2805, 9896, - 479, 726, 4101, 1084, 595, 1677, 8165, 7972, 1580, - 114, 3781, 3971, 575, 5891, 6040, 1972, 3274, 7326, 
- 5553, 8819, 801, 3126, 4151, 4009, 5421, 9087, 2405, - 214, 8515, 7757, 8463, 5452, 7519, 4144, 2396, 9433, - 8418, 4538, 5964, 5562, 2306, 586, 8487, 8023, 9978, - 2270, 339, 652, 1149, 7963, 7214, 6060, 6418, 5514, - 3555]), - values=tensor([-5.8321e-02, -1.0536e+00, 1.3711e+00, 2.2847e+00, - 8.6539e-01, 2.2000e+00, -2.5029e-01, 9.6036e-01, - 3.4734e-02, 1.2159e-01, 7.5239e-01, -4.2812e-01, - 1.1426e+00, 9.9742e-01, -3.3699e-01, 3.7740e-01, - 8.3218e-01, 1.4849e+00, -1.3472e+00, -9.3481e-01, - -1.8908e+00, -4.9636e-01, 8.3604e-01, -4.2333e-01, - 5.0477e-01, -3.7533e-01, -1.3886e+00, -5.7376e-01, - 5.4717e-01, -6.7422e-01, 3.7287e-01, 7.8623e-01, - 6.1444e-01, 9.6579e-01, -3.7618e-01, -1.2343e+00, - -9.1124e-01, 1.1064e+00, -9.8007e-01, -9.2698e-02, - 6.4948e-01, 8.1364e-01, -8.8589e-01, -8.3750e-01, - 1.0300e+00, -1.7302e+00, -1.4895e+00, 3.7038e-01, - 1.0420e-01, -9.8874e-01, 2.2233e+00, 2.0263e-01, - -1.3220e+00, -2.0129e+00, 4.6725e-01, -1.7669e+00, - 5.0711e-01, -1.7349e+00, 7.3483e-01, -3.6095e-01, - 1.3744e+00, 7.1197e-01, 6.6537e-02, -2.1411e+00, - -5.4146e-01, -4.3910e-01, 1.9848e+00, -1.2927e+00, - 6.5827e-01, 1.2582e+00, 7.8976e-01, -3.0704e-01, - -7.4789e-02, -5.1713e-02, -5.2032e-01, -2.9047e-01, - 1.6690e+00, 9.6946e-01, -9.9150e-01, -1.2714e-01, - 4.5388e-01, -2.1241e+00, -2.9707e-01, -1.2364e-01, - -1.0662e+00, 1.0525e+00, 4.4433e-01, 7.6404e-01, - -6.9956e-01, -8.2530e-01, 8.1422e-01, 1.8750e+00, - -5.2540e-01, -5.0406e-01, 1.1832e+00, 6.0872e-01, - -1.0338e+00, -3.3237e-01, -3.3672e-01, 9.1013e-01, - -1.7739e-01, 1.3737e+00, -1.1665e+00, 3.4620e-01, - 5.1651e-01, 1.5546e-02, 4.6462e-01, -7.7016e-01, - 1.5815e+00, -1.0479e+00, -7.9507e-01, -4.3014e-02, - 4.9160e-01, -3.4034e-01, 1.2115e+00, 2.8728e-01, - 8.0528e-01, -8.3934e-01, -3.3325e-01, 5.5678e-01, - -7.7119e-01, 7.8389e-02, -1.3413e+00, 1.0645e-01, - -1.1389e-01, -3.1325e-01, 6.1789e-01, -1.2699e+00, - -1.7949e+00, -6.1489e-04, 2.4797e-01, 6.3276e-01, - 5.2437e-01, 2.5265e+00, -4.1223e-01, -7.8872e-01, - 2.7848e-02, 1.8384e-01, 5.3328e-01, -8.1310e-01, - 1.4620e+00, 9.8843e-01, 1.9245e+00, 7.4561e-01, - -2.4949e-01, -4.6079e-01, -4.0545e-01, -1.9349e+00, - 1.8575e+00, -1.9678e+00, -1.8695e-01, -3.0940e-01, - 2.4758e-01, -5.1331e-01, -4.0686e-01, -5.5147e-01, - 6.7930e-01, 7.2947e-01, -3.7896e-01, 3.9343e-01, - 4.7151e-01, -1.3777e+00, -1.3204e+00, 3.3110e-01, - 1.2632e+00, -5.6118e-01, 4.6333e-01, 2.1421e-01, - 5.0867e-01, 1.8214e+00, 4.3012e-01, 5.9729e-01, - -2.3856e-01, 4.0854e-01, -1.3233e+00, 3.8049e-01, - 1.9989e+00, -8.7387e-02, 2.7262e+00, 2.1358e-01, - 4.0193e-01, -5.5407e-01, -2.6822e+00, 4.3966e-01, - -7.9737e-01, -7.1283e-02, -2.9672e-01, -4.0969e-01, - 1.1242e+00, 2.3837e-01, -3.3900e-01, -1.2749e+00, - -7.7218e-01, 1.6363e+00, -3.9449e-01, 9.2654e-01, - -3.0326e-01, -7.0715e-02, -1.2153e+00, 1.1531e+00, - -1.7347e+00, -7.8662e-01, 9.3799e-01, 3.6441e-01, - -6.8617e-01, -1.6076e+00, -9.5673e-01, 1.6901e+00, - 3.5922e-01, 1.0953e+00, -1.0627e+00, 2.0561e-01, - -7.0759e-03, 1.1481e+00, -1.1969e+00, 9.7205e-02, - 8.6478e-01, -7.2710e-01, -2.9855e-02, 1.4566e+00, - 2.2941e-01, -6.5417e-01, -1.2597e+00, -7.9836e-01, - -5.7610e-01, 1.5365e+00, -1.2438e+00, 8.8555e-01, - 2.3996e-02, 4.5265e-01, -1.6103e-01, 1.5738e-01, - -1.3471e+00, 1.3332e-01, -1.7570e+00, -8.0491e-01, - 5.5578e-01, 9.1739e-01, 3.1047e-01, -1.1840e+00, - -8.4009e-01, -1.3077e+00, 6.1993e-02, -1.1784e+00, - 6.3146e-01, -1.0814e+00, -8.9777e-01, 2.6414e-01, - -5.3498e-01, -1.2494e+00, -1.9572e-01, -1.3498e+00, - -8.5701e-01, -1.9717e+00, 
3.0453e-02, -1.3636e+00, - -2.8500e-02, 1.1577e-01, 4.6679e-01, -1.1379e+00, - -5.2938e-01, -2.8160e-01, -1.2206e+00, -1.6336e+00, - -8.4655e-02, -2.4172e+00, 1.7028e+00, -4.6438e-01, - -1.8477e+00, 1.2091e+00, 1.2522e+00, -1.0783e+00, - 4.0777e-01, 2.4379e-01, 1.9986e+00, -2.0004e+00, - -8.2675e-01, 1.7191e+00, 1.1599e+00, 1.4077e+00, - -1.9595e-01, 1.2723e+00, 2.0571e+00, 1.7422e+00, - 3.4911e-01, -1.3971e+00, 1.0576e+00, 6.8314e-01, - 9.1265e-01, 8.9033e-01, 1.6876e-01, 4.4065e-01, - -9.5693e-01, 9.4284e-01, -6.6802e-01, -2.7450e-01, - 9.9803e-02, 7.2484e-01, 8.0461e-01, 2.3650e+00, - -1.0720e+00, 2.3881e+00, -6.3311e-01, 4.7639e-02, - 3.1451e-01, 3.2758e-01, 1.1598e+00, -3.8540e-01, - 7.8632e-01, 7.3874e-02, -6.7245e-01, 2.3095e-01, - -1.7655e-01, -1.6253e+00, 4.3439e-01, -5.6198e-01, - 2.4679e+00, -7.5650e-01, -4.3371e-01, -6.7424e-01, - 2.1450e+00, 7.1503e-01, 2.0236e+00, -7.9035e-01, - -5.0880e-01, 8.1445e-01, 3.1281e-01, -2.9328e-01, - 1.4162e-01, 4.3335e-01, 6.0515e-01, 5.3365e-01, - -6.6216e-01, -5.4866e-01, 7.7063e-03, 1.1676e+00, - -1.3137e+00, -1.3718e+00, 3.2506e-01, -1.1547e+00, - 2.5543e-01, -1.0995e+00, 5.5100e-01, 3.2379e-01, - 1.6089e+00, -2.1373e+00, 5.4432e-01, -6.4536e-01, - 8.6856e-01, 8.2402e-01, -1.5556e+00, 7.6277e-01, - -7.5710e-01, 4.9209e-01, -2.0173e+00, -7.9302e-01, - -6.9808e-01, 1.2169e+00, 1.2805e+00, 9.7303e-01, - 1.6909e+00, -3.4065e-01, 9.4489e-02, -8.7978e-01, - 4.7930e-01, -1.3129e+00, 2.2102e+00, -7.5828e-01, - -1.0235e+00, -3.5186e-02, 1.1026e+00, -3.5811e-01, - 4.4460e-01, 1.6242e-02, 7.1573e-01, -1.0844e+00, - -6.5138e-01, -1.3155e+00, 1.2242e+00, -2.2980e+00, - 6.4174e-01, 6.1247e-02, 1.6132e+00, 5.7191e-01, - 1.3973e+00, 2.5004e+00, -1.2287e+00, -1.6721e+00, - -7.2816e-01, 1.3303e+00, 1.3877e+00, 2.3823e+00, - 3.0504e-02, 1.5894e+00, 1.7693e-01, 1.0735e+00, - -1.7219e+00, -5.1428e-01, -1.7483e+00, -2.0180e-01, - 7.7919e-01, -1.3307e-02, -9.7129e-01, -1.1232e+00, - 1.3473e+00, 6.5038e-01, 1.4526e+00, -1.0377e-01, - -2.7166e-02, 9.6170e-02, 5.6776e-02, 5.5269e-01, - -2.2984e-01, 7.4075e-01, -2.7142e-01, -1.0202e+00, - 3.0187e-01, 9.4541e-01, 9.4631e-01, 5.4760e-01, - -4.8929e-01, -4.7149e-02, 1.2862e+00, 2.9739e-01, - -3.9029e-01, 1.1005e+00, 7.1078e-01, -1.7957e-01, - 8.7675e-02, -1.0449e+00, 3.9558e-01, 1.6747e-02, - -2.8053e-01, 1.8743e+00, -1.1343e+00, 3.3356e-02, - 5.2968e-01, 1.2066e+00, -5.5997e-01, -1.4346e+00, - -8.2084e-02, 1.4587e+00, -6.9692e-01, 1.5933e-02, - 5.7387e-01, 1.2692e-01, -4.5911e-01, -9.1351e-01, - 9.9481e-01, -1.9605e+00, 1.2682e+00, 1.2046e-01, - 4.3630e-01, 1.0579e+00, -1.9585e+00, -9.3446e-02, - -4.0026e-01, 3.3254e-01, 1.1341e+00, 1.4250e+00, - -2.0852e-01, 7.3204e-01, -1.6219e+00, 3.8339e-01, - -6.2430e-01, -9.6063e-02, 1.8321e+00, -1.1671e-02, - 3.6916e-01, 1.8906e-01, 1.3751e-01, -7.0560e-01, - -3.4148e-01, 6.2011e-01, -1.7038e+00, 4.8513e-02, - -2.9604e-01, -5.0475e-01, -3.5845e-01, -8.7147e-01, - 1.7621e-01, -1.1282e+00, -8.1432e-01, -2.2518e-01, - -1.2148e-01, 9.8011e-02, 2.1543e+00, 1.2895e+00, - -8.7422e-01, -8.0751e-01, -1.0026e+00, 5.3257e-01, - -4.8489e-01, 2.0063e+00, -2.5763e-01, 1.3821e+00, - -4.8270e-01, 3.7440e-01, 8.9182e-01, -4.9017e-02, - -2.6580e+00, 4.2412e-01, 1.1576e+00, 2.8544e-01, - -6.0879e-01, -1.0101e+00, 2.9248e-01, 1.6864e-02, - 1.4669e-01, -1.2590e+00, 3.1559e+00, -1.0801e-02, - -1.2675e-02, -7.0586e-01, -4.2623e-01, 5.3462e-01, - 5.1302e-01, -1.2168e+00, -6.5137e-01, 2.1875e+00, - -1.8554e-01, -1.7144e+00, -3.5572e-01, 4.6694e-01, - 5.1720e-01, -1.6763e+00, -1.1713e+00, -1.9412e+00, - 
-7.2575e-01, 4.9474e-01, 6.4056e-01, 1.3212e+00, - 4.7582e-01, -5.4841e-01, 1.5315e+00, -3.0029e-02, - 1.4717e+00, 1.7626e+00, 1.4323e+00, -5.3402e-01, - 2.6435e-01, 7.7192e-02, -1.3290e-01, 1.6936e-01, - -7.8468e-01, 1.1574e+00, -7.3305e-01, -1.0750e+00, - -1.9646e+00, -5.0856e-01, 1.1250e+00, 9.1897e-02, - 5.9680e-01, -2.0934e-01, 5.1394e-01, -1.0141e-01, - 3.7976e-01, -4.7986e-02, 1.4648e+00, -2.2346e+00, - -4.9742e-01, -2.0726e-02, 5.8145e-01, -2.0168e+00, - 2.2356e-01, -1.8052e-02, 1.0444e+00, 2.7983e-03, - -7.4501e-01, 1.3335e+00, -7.3230e-01, 1.9795e+00, - 3.5096e-03, 4.2891e-01, 9.3393e-01, 6.9789e-01, - 8.1140e-01, 9.3604e-01, -8.4939e-01, -2.3166e-03, - 5.7551e-02, -6.5445e-01, 2.4189e-01, -2.5144e-01, - -1.2720e+00, 7.9557e-01, 6.6317e-02, -1.4960e+00, - 2.8957e-01, 6.4401e-01, -1.1835e+00, 8.1908e-01, - -1.1195e+00, 1.0852e-04, -2.6878e-01, 1.0203e+00, - -8.3461e-01, 1.3001e+00, -5.1365e-01, -3.9105e-01, - 1.4296e-01, 3.7511e-01, -1.3034e+00, 8.4505e-01, - 3.9152e-01, 1.2920e+00, 2.2863e-01, 3.1730e-01, - 2.7843e-01, 3.3405e-01, 1.4540e+00, -5.3026e-01, - 5.7681e-01, -8.5635e-01, 9.4756e-02, 9.1160e-03, - -1.1158e-01, -2.5398e-01, 1.2877e+00, 1.4529e+00, - 3.5377e-01, -1.5401e+00, -8.0347e-01, -2.0706e+00, - 1.0997e+00, -1.0485e+00, -8.4604e-01, 4.3011e-02, - 2.6854e-01, -1.5410e+00, 6.1378e-01, -7.8217e-02, - -4.7344e-01, -1.8367e-01, -1.7696e+00, 1.0631e-01, - -3.2541e-01, -7.0282e-01, -9.4914e-01, -2.1341e-01, - -2.2482e+00, 1.6764e+00, -1.1297e+00, -4.1562e-04, - -1.1859e+00, -2.1923e-01, -1.8833e-01, -1.5443e-01, - 4.5659e-01, 5.7646e-01, -1.0444e+00, -7.3792e-02, - -2.7850e-01, 2.1237e-01, -1.3507e+00, 1.4359e+00, - -1.0939e+00, 6.5857e-01, 1.7377e-01, 1.2770e+00, - -2.2846e-01, 6.2289e-01, -4.8826e-01, 3.3915e-01, - 1.2518e+00, 1.8370e+00, -2.0132e+00, 1.3912e+00, - 6.5592e-01, 1.6696e+00, 2.2828e-01, 6.7127e-01, - -3.4168e-02, 1.8966e+00, -1.9759e-01, 3.1685e-01, - -7.9687e-01, 4.1681e-01, -1.2965e+00, -4.4668e-01, - 7.3951e-02, -7.9039e-01, 1.1352e+00, -1.7531e-01, - 1.6779e+00, 2.0049e+00, 7.8288e-01, 5.7802e-01, - 2.3709e+00, -3.4110e-01, 7.7174e-01, 7.1561e-01, - -6.4241e-01, -3.3023e-01, 8.1489e-01, 4.3727e-02, - 1.1848e+00, -1.1756e+00, -1.1703e+00, -4.2293e-03, - -9.5706e-01, -1.6086e-01, 7.2709e-01, 3.3596e-01, - 1.8913e-01, -2.0025e-01, 1.5456e+00, -3.1282e+00, - 9.5311e-01, -9.1885e-01, 3.8799e-01, -4.0008e-01, - -8.5246e-01, 1.1552e+00, 8.4403e-01, 2.9816e-01, - -6.7938e-01, 1.5203e+00, 1.2529e+00, -1.4616e+00, - -2.8175e+00, 5.3612e-01, 1.5195e+00, 6.8513e-01, - -4.4330e-01, -3.4211e-01, 7.9472e-01, -2.3864e-01, - 1.8329e+00, 1.0380e+00, 8.6038e-01, -7.5975e-01, - 5.4537e-01, -6.6981e-01, 5.1024e-01, 6.5492e-01, - -4.5618e-01, 1.0727e+00, -7.4883e-01, -3.6014e-01, - -3.1104e-01, -1.5460e+00, 4.0017e-01, -4.9926e-01, - -1.6642e+00, 1.1707e+00, 9.5597e-02, -1.2148e+00, - 1.1273e+00, 1.1249e+00, -5.6183e-02, -6.8504e-02, - 9.4557e-01, 2.4702e+00, -4.4558e-01, -3.6733e-02, - -9.3382e-01, 1.2537e+00, 7.7316e-01, 9.2821e-02, - -7.2172e-01, -1.5260e-01, -9.1759e-02, -6.6660e-01, - -4.2097e-01, -4.2025e-01, 6.5436e-01, -8.8473e-01, - -3.3085e+00, 1.1026e+00, -5.3303e-01, -1.1872e+00, - 2.7772e-01, 4.0847e-01, -7.1724e-01, -1.1891e+00, - 9.6555e-01, -1.2191e+00, -1.0623e+00, 1.2701e+00, - -2.8026e-01, -2.1585e-01, 3.6662e-01, 1.5732e+00, - 9.0268e-02, 8.8774e-02, 9.8233e-01, 1.4662e+00, - 4.7175e-01, -9.8353e-01, 4.3171e-02, 2.1522e+00, - 5.4995e-01, 9.2915e-02, -5.7800e-01, -1.3629e+00, - 1.7423e-01, 4.8613e-02, 2.4206e-01, -5.4092e-01, - -3.1975e-01, -4.5454e-01, 
-1.6420e-01, -1.3450e+00, - 1.4813e+00, 8.3541e-01, 1.3971e-01, 3.7177e-01, - -8.5783e-01, -3.8537e-01, -2.5242e+00, 2.0586e+00, - 2.4502e+00, 1.1520e+00, -9.8919e-01, -1.1558e+00, - -9.7191e-01, -4.1303e-01, -6.2209e-01, -6.2719e-03, - -1.6744e+00, -1.3172e-01, 3.3523e-01, -2.3893e+00, - 8.4629e-01, -6.1317e-01, -1.0366e+00, -6.0393e-02, - -1.5428e+00, -1.1263e-01, -4.0083e-02, -5.1239e-01, - -4.8777e-02, 1.8043e+00, 7.5034e-02, -1.7681e-01, - -9.7525e-01, 9.0209e-01, 2.7090e-01, 1.1812e+00, - -4.5112e-01, -1.1279e+00, 2.1579e-01, 4.3145e-01, - -1.1553e+00, -3.5942e-01, -1.3157e-01, 1.5302e+00, - -4.6190e-01, 2.4061e+00, 6.0990e-02, -8.5396e-01, - 4.1777e-01, 1.7893e+00, 5.8486e-01, -1.4619e+00, - 1.5277e+00, 6.5226e-02, 1.7391e-01, 6.5561e-01, - -1.0628e+00, 2.8577e-01, 1.0177e+00, -1.3192e+00, - 6.3153e-01, 4.3790e-03, -1.8552e+00, 9.5859e-01, - 1.3922e+00, 9.2912e-01, 6.3108e-01, 5.5035e-01, - -2.3677e-01, 1.3564e+00, 1.6363e+00, -6.9366e-01, - 9.0106e-01, 4.2455e-01, 5.2578e-01, 3.3651e-01, - 5.8909e-01, -1.8736e-01, 1.8988e-01, -1.6684e+00, - 1.2431e+00, -1.1827e-02, 3.4297e-01, 1.2995e+00, - 3.0211e-01, -1.7190e+00, -3.9808e-01, 2.1798e-01, - -4.9366e-03, 2.0546e+00, 2.3932e-01, -3.6542e-02, - -6.0041e-01, -4.1822e-01, 2.9053e-01, -1.9323e+00, - 1.3622e+00, -9.1572e-01, -3.8763e-01, 3.5311e-01, - -3.3410e-02, -6.9534e-02, -2.2625e+00, -1.1003e+00, - -4.8818e-01, -1.4156e+00, -1.5541e+00, -1.1944e-01, - 7.2575e-01, 8.6949e-01, -1.0446e+00, -2.1557e+00, - 7.1430e-01, 2.4782e-01, 2.2690e-01, 1.7215e+00, - 4.9319e-01, -2.9989e-01, 1.5653e+00, 4.6723e-01, - 1.6231e+00, 6.0466e-01, 9.0624e-01, 3.9397e-01, - 8.8744e-01, 8.2185e-01, -4.7667e-01, -4.3790e-02, - -3.8679e-01, 6.6211e-02, -6.8113e-01, 1.2998e+00, - 1.4049e+00, -1.7555e-01, 4.4489e-01, 1.4931e+00, - 5.2540e-01, -4.1382e-01, 1.2180e-01, -8.7144e-01, - -1.4104e+00, -1.2259e+00, -7.3383e-01, 9.7703e-01, - -1.0389e-01, -2.4810e+00, 3.0598e+00, -1.8455e+00, - 9.2837e-01, -2.0665e-01, 1.6798e+00, 4.3944e-01, - -1.3341e-01, -6.8865e-01, -1.0923e+00, -1.1770e+00, - -2.2761e-01, 1.9312e+00, 1.8696e-01, -2.0886e-01, - 7.6178e-01, 1.5225e+00, 3.5777e-01, 1.5199e+00, - -7.1123e-01, 6.9979e-01, -6.2374e-02, 2.2441e-01, - 2.6463e+00, 6.8153e-01, 2.3795e-01, -1.0060e+00, - -4.5511e-01, -2.7521e+00, -1.0330e+00, -4.1218e-01, - -1.1294e+00, -1.4019e+00, 1.4918e+00, -8.8894e-01, - -1.3682e+00, -4.2415e-01, 3.1284e-01, 8.3110e-01, - 1.3104e+00, 8.2391e-01, -9.3483e-01, 7.5210e-02, - -2.4588e-02, 2.6560e-01, -2.6718e+00, 7.7709e-01, - -5.0970e-01, 1.2523e+00, -1.9754e+00, -8.4242e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.9147, 0.2269, 0.9183, ..., 0.1784, 0.3861, 0.3996]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 0.027229785919189453 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '385607', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.351012468338013} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5715, 5170, 1366, 6103, 7007, 8959, 4623, 4213, 8177, - 5959, 965, 1066, 6411, 9244, 7565, 885, 5094, 4201, - 2700, 3259, 8413, 776, 9023, 5383, 931, 2694, 3746, - 4804, 4080, 9861, 8358, 6322, 7311, 732, 8943, 872, - 1910, 7813, 9850, 8757, 6710, 9989, 2709, 5071, 5030, - 3890, 6697, 4006, 9819, 222, 4863, 6733, 3928, 8353, - 4772, 8658, 7813, 2506, 281, 1398, 5781, 3017, 9545, - 4412, 6691, 20, 2669, 426, 5860, 2481, 3363, 2483, - 9390, 4895, 4755, 9090, 3862, 956, 3381, 8382, 4505, - 740, 2726, 5804, 1646, 5377, 1732, 8671, 7394, 4742, - 853, 3830, 9952, 9062, 7450, 1545, 5510, 9397, 6475, - 1984, 553, 1910, 4255, 5357, 7796, 1432, 4814, 9896, - 9266, 4030, 5580, 3411, 986, 3778, 1036, 834, 5036, - 8077, 7114, 3461, 3439, 8962, 2976, 402, 9230, 3072, - 2655, 459, 6916, 245, 6074, 4680, 9422, 4094, 1234, - 8469, 7017, 1311, 5026, 4074, 6905, 982, 8484, 444, - 9095, 9521, 7135, 6384, 4268, 7803, 5512, 9702, 832, - 2191, 943, 3605, 2877, 8060, 1350, 1113, 5650, 9397, - 3042, 906, 4220, 5724, 7986, 4626, 1704, 4170, 2914, - 4842, 290, 569, 2385, 8282, 4009, 7571, 6247, 2594, - 4015, 7626, 8298, 8849, 3734, 5442, 7781, 1955, 7283, - 4929, 3468, 5402, 6172, 6472, 4264, 4959, 5853, 3725, - 4731, 9206, 4122, 3195, 6337, 278, 2036, 9224, 639, - 2097, 9985, 340, 7149, 1272, 7055, 141, 1629, 866, - 3321, 8436, 939, 3194, 2312, 3757, 353, 7887, 2570, - 8937, 4843, 3752, 4491, 7673, 9320, 1031, 4454, 69, - 8693, 5772, 6677, 698, 3341, 9690, 5658, 4413, 8482, - 303, 6575, 6729, 141, 982, 8393, 69, 6252, 9713, - 2464, 3218, 1320, 5956, 2842, 5100, 9410, 2582, 9752, - 5666, 3883, 1200, 3666, 3995, 9247, 6012, 2461, 7982, - 5030, 3573, 4124, 4694, 3985, 8704, 7450, 9912, 8703, - 1678, 315, 2938, 293, 1741, 4322, 8268, 1998, 9301, - 462, 2587, 6158, 2001, 1369, 8863, 844, 2388, 2632, - 7076, 1872, 6068, 9638, 2342, 3327, 4462, 7699, 6301, - 1010, 4686, 6980, 5997, 9389, 122, 2390, 5779, 945, - 7874, 6220, 879, 2168, 4533, 3674, 8415, 3668, 5012, - 2375, 4125, 1340, 2144, 853, 787, 4893, 8475, 5897, - 8787, 1849, 2709, 6425, 3918, 6400, 4787, 8388, 9602, - 3267, 9685, 7452, 2571, 4868, 1195, 687, 6633, 8706, - 3493, 9647, 2597, 5926, 2748, 7867, 7582, 8418, 6792, - 4994, 9743, 9069, 6084, 8651, 7644, 4406, 8368, 2112, - 6516, 2714, 6137, 824, 2254, 2409, 6690, 212, 6730, - 3880, 1815, 1273, 6145, 628, 3927, 5444, 3929, 7859, - 8763, 7365, 2602, 6237, 5693, 448, 5643, 4102, 617, - 4502, 6173, 5701, 369, 6538, 1399, 2196, 5620, 5677, - 2970, 9715, 598, 501, 3155, 8409, 8588, 1474, 5149, - 6955, 503, 7618, 8049, 1123, 296, 1193, 5359, 4251, - 1876, 2912, 5408, 3517, 2643, 8907, 2073, 5500, 2612, - 6222, 3619, 9922, 6922, 4095, 2337, 2773, 5163, 5312, - 556, 4947, 8713, 7932, 8380, 3653, 2471, 6101, 3784, - 8789, 8655, 966, 9162, 7285, 1602, 3680, 4037, 545, - 1522, 8634, 3101, 8453, 7972, 4844, 2000, 6257, 2057, - 9416, 4465, 5011, 7194, 2451, 7019, 4706, 7325, 1367, - 996, 6735, 9163, 5750, 1499, 2615, 5438, 3494, 9487, - 2060, 4637, 1057, 2613, 4469, 4981, 2274, 293, 5976, - 4536, 6063, 9375, 3001, 6449, 3917, 5189, 5545, 9955, - 4116, 569, 6679, 8298, 537, 6985, 506, 1209, 5268, - 7921, 2148, 8099, 7057, 858, 3977, 4692, 9162, 4388, - 6469, 990, 7737, 6193, 9804, 527, 6877, 9639, 2729, - 6860, 399, 7883, 3416, 3869, 6853, 3256, 2911, 31, - 7069, 1383, 8423, 9335, 1954, 5985, 9385, 175, 2845, - 158, 
1102, 9000, 4155, 4737, 8722, 9973, 5662, 5178, - 5065, 7221, 4347, 9359, 9111, 4446, 1768, 2976, 2151, - 1765, 4974, 4487, 2214, 9191, 706, 2948, 1112, 9654, - 8919, 4854, 62, 111, 4294, 4510, 7501, 8992, 2443, - 3824, 9180, 7146, 7645, 7192, 695, 8299, 4437, 8824, - 8438, 3127, 3533, 7349, 1661, 5740, 3505, 6956, 3994, - 6259, 6129, 9494, 1501, 346, 9813, 2608, 3355, 226, - 6112, 5123, 1237, 6530, 4967, 4723, 9036, 8636, 2759, - 6474, 1921, 6676, 194, 1197, 58, 9063, 8218, 902, - 1421, 9631, 5223, 3035, 5207, 4467, 3865, 5103, 2475, - 2190, 7218, 3031, 2185, 1631, 2384, 4300, 3381, 5443, - 1722, 11, 3664, 9445, 9278, 8112, 821, 1464, 3061, - 956, 9393, 8800, 7554, 5983, 5524, 4705, 8167, 5534, - 2259, 3963, 7890, 7834, 5540, 1454, 5892, 9838, 3064, - 8949, 8876, 4242, 913, 7654, 4364, 7598, 4480, 7142, - 1806, 9218, 9349, 9986, 2831, 5215, 5964, 3401, 5626, - 715, 7523, 661, 256, 5652, 3801, 7877, 3804, 5444, - 543, 1240, 297, 4458, 296, 3483, 6097, 2406, 5908, - 8293, 230, 3868, 5811, 341, 5584, 1964, 3679, 6923, - 9275, 1980, 5746, 6370, 7209, 5816, 3790, 1612, 8641, - 982, 6404, 8924, 4784, 8085, 9634, 4756, 9854, 4710, - 9561, 323, 1545, 7493, 7704, 526, 7049, 9884, 990, - 574, 8831, 6691, 9669, 9563, 2227, 2531, 6206, 5574, - 7574, 9233, 6133, 4215, 1236, 1792, 6795, 4824, 4509, - 7910, 1981, 2037, 1444, 5256, 8748, 8672, 4788, 9640, - 469, 8335, 2816, 9373, 8422, 1226, 6541, 3258, 4540, - 6925, 3286, 5880, 5753, 3465, 4007, 4316, 5629, 1942, - 4197, 7348, 2591, 3275, 2092, 2572, 2578, 3654, 3983, - 9875, 699, 6017, 142, 2261, 4490, 5834, 7985, 2050, - 3757, 4213, 5026, 5637, 6129, 4084, 6820, 7343, 5051, - 6234, 9830, 4254, 9968, 7775, 1051, 8126, 1690, 7627, - 2656, 5799, 5471, 2862, 5932, 4002, 1731, 885, 9628, - 4394, 5873, 3901, 2328, 2712, 2154, 6050, 9891, 6611, - 8862, 6974, 5261, 1969, 915, 5359, 6593, 2354, 4355, - 9061, 3416, 5637, 3333, 4705, 2913, 1953, 7785, 6198, - 6333, 1883, 8492, 8212, 9014, 6538, 6664, 6130, 4866, - 416, 5408, 3413, 5400, 7128, 3227, 4227, 6405, 9348, - 4588, 6647, 3803, 813, 5390, 7001, 9476, 3056, 2425, - 4499, 1551, 4732, 3371, 8290, 2391, 5252, 9609, 5755, - 488, 5006, 601, 4984, 5898, 3223, 1489, 7948, 1301, - 2331, 5428, 5236, 4886, 1211, 3242, 543, 6528, 7365, - 410, 7798, 2078, 5461, 6439, 244, 8195, 2328, 1765, - 7557, 8571, 4456, 5067, 8305, 5830, 5687, 8021, 8492, - 7138, 2401, 1550, 3868, 6016, 7528, 648, 2589, 340, - 8482, 197, 405, 7121, 8201, 5350, 7875, 3360, 4345, - 8073, 5146, 7863, 1324, 5212, 9598, 1861, 9649, 9446, - 690, 9233, 9415, 7367, 751, 4264, 353, 2912, 5647, - 4871, 3101, 4382, 9537, 7204, 2254, 8390, 6773, 8733, - 6715, 3319, 7375, 6956, 8019, 8545, 97, 471, 8229, - 3587, 4327, 9544, 9410, 1843, 5715, 6699, 1624, 2302, - 1008]), - values=tensor([ 2.9103e-01, 2.5564e-01, -8.1799e-01, -6.2049e-02, - -7.3697e-01, -6.3605e-01, 9.4826e-01, 1.0116e+00, - 5.2497e-01, -1.3307e-01, -8.6453e-01, -6.7539e-01, - -1.0600e+00, -5.8188e-02, 9.3941e-01, 1.6961e+00, - -1.2806e+00, -6.5955e-01, -3.1771e-01, -7.6049e-01, - -5.2247e-01, 7.6536e-01, 3.5058e-01, 1.1096e+00, - 1.7242e+00, 8.9146e-01, -1.1507e+00, -1.3999e-01, - -1.0431e+00, -5.3133e-01, 1.9636e-01, -4.0441e-01, - -6.9943e-01, 2.9240e-01, -1.9072e+00, -1.1570e+00, - 1.3900e+00, -1.1292e+00, 1.2483e+00, -5.9510e-01, - 1.8875e+00, -1.3820e+00, 3.3827e-01, -1.2718e-01, - 5.7576e-02, 6.0059e-01, -6.2326e-01, 2.9339e-01, - -1.4748e+00, 4.5609e-02, -2.4964e-01, -2.4800e-01, - 9.6611e-01, -1.7927e-01, 1.2051e+00, -4.7421e-01, - -5.4664e-01, 1.7531e+00, 6.0661e-01, -5.5880e-01, - 
7.2796e-01, 5.0887e-01, 5.8651e-01, -1.7426e-01, - -7.0147e-01, 4.8014e-02, -5.0125e-01, 2.5125e+00, - 7.3657e-01, -1.8122e-01, -2.5039e-02, 1.5463e+00, - 2.4457e-01, -1.6198e+00, -1.2503e+00, 2.5305e-01, - -3.5011e-01, -1.6723e-02, -1.6005e+00, 4.5776e-01, - -3.8309e-01, -2.2811e+00, -1.2957e+00, -6.1809e-01, - 6.1736e-01, -1.6596e+00, 1.7813e+00, 1.7699e-01, - -8.3328e-01, 8.0230e-01, -1.4158e+00, 1.5920e-01, - -2.8695e+00, -8.4163e-01, 6.3621e-01, -1.7808e+00, - -2.8399e-01, 1.8622e+00, 4.2233e-01, -8.0079e-01, - -4.9846e-01, 2.1895e-01, 6.6414e-01, -1.2450e+00, - 1.3102e+00, 3.4370e-01, -3.5426e-01, -9.2345e-01, - -7.3484e-01, -1.5036e+00, 7.7696e-02, 2.3666e-01, - -8.5529e-01, 8.3124e-01, 3.2553e-01, 1.0987e+00, - -7.2158e-02, -1.5711e+00, 3.6648e-01, 8.4743e-01, - 8.4705e-01, -5.7712e-01, -1.4117e+00, 1.0459e+00, - 9.3385e-01, 1.3522e-01, 6.9141e-01, 5.5360e-01, - 5.1646e-01, 1.4877e+00, 7.7260e-01, -6.4755e-01, - 9.9628e-02, 1.0797e+00, 6.1417e-01, -2.8372e-01, - 6.7120e-01, 1.0210e+00, 4.3399e-01, -5.9343e-01, - 1.2068e+00, -7.1175e-01, 8.1232e-01, 1.4308e+00, - -3.3188e-01, 6.4015e-01, 5.9899e-01, 2.9270e-02, - 7.1591e-01, -1.1306e-01, -4.5101e-01, -1.0585e+00, - -9.3646e-01, 1.4782e-01, -8.6557e-02, 8.6675e-02, - 6.5024e-01, -5.8139e-01, 1.2607e+00, 5.1263e-01, - -1.3972e+00, 4.0936e-01, 5.1840e-01, 1.6108e+00, - 5.1199e-01, -7.1732e-01, 3.4024e-01, 9.0506e-01, - 1.1776e+00, -2.8262e-01, -1.7399e-01, 7.4221e-01, - 4.8329e-01, -3.7932e-02, -1.2059e+00, 1.6359e+00, - 9.8108e-02, 1.1077e+00, -1.3214e+00, 5.1149e-01, - 9.2661e-01, -9.1623e-01, 1.3080e+00, -3.3310e-01, - 5.5691e-01, -9.4724e-01, -1.9606e+00, 7.6948e-01, - 8.7366e-01, 1.1104e+00, 2.1968e+00, -1.9555e-01, - -2.3607e+00, -1.0473e+00, 1.0593e-01, -1.6398e+00, - -8.8382e-01, 1.0566e+00, 1.1702e-01, -6.7596e-01, - -9.4156e-02, -1.7350e-01, 1.0064e+00, -7.8293e-01, - -7.7791e-01, 6.3860e-01, -1.3045e+00, 6.9555e-01, - 6.6262e-01, 1.6371e-01, 3.3537e-01, -8.0321e-01, - -4.1732e-01, 3.7304e-01, 9.3027e-01, -7.3961e-01, - 1.2350e+00, -3.8386e-01, 1.0847e+00, -2.0001e+00, - -8.8202e-01, 9.2928e-01, 3.2735e-01, -1.0626e+00, - 4.2900e-01, -1.5425e-02, 6.4490e-01, 9.6476e-01, - 6.5656e-01, 3.1420e-01, 1.0030e+00, 2.2156e+00, - 5.0200e-01, -1.5096e+00, -6.8684e-01, -1.1971e+00, - 8.8372e-01, -9.8230e-01, 9.3104e-01, -5.7465e-01, - 1.7907e-01, -5.4097e-01, 8.8711e-01, 3.1832e-01, - -1.7165e-01, -1.0482e+00, -8.8796e-02, 5.2947e-01, - -8.7406e-01, -1.5492e-01, 2.0111e+00, -1.2999e-01, - -5.7317e-01, 1.1088e+00, 1.6615e-01, -1.6310e-01, - 6.9012e-01, -2.5172e-01, -7.3321e-01, -6.0964e-01, - -1.4506e+00, 1.6092e+00, 1.2914e+00, 4.6507e-01, - 1.0410e+00, -2.0020e+00, 5.7010e-01, -1.8916e-01, - -8.2795e-01, -1.4583e+00, 6.5309e-01, 4.7725e-01, - 6.7825e-01, 8.1542e-01, -2.3767e+00, 3.2191e+00, - 2.7176e-02, 1.2190e+00, 2.1457e-01, -1.2138e-01, - 3.8748e-01, -3.3644e-01, -1.8780e-02, 2.6576e-01, - 1.6098e+00, 8.8909e-01, -4.6430e-01, -7.0632e-01, - -3.9191e-01, -3.2250e-01, -2.7332e-01, -4.1991e-01, - -6.2435e-01, -3.8380e-01, 9.0022e-01, 6.4944e-01, - 1.2600e+00, -9.1453e-01, 1.0080e-01, -2.2021e+00, - 3.2565e-01, -8.5091e-01, -5.3778e-01, -7.0766e-02, - -3.7588e-01, 2.3724e+00, -5.9976e-01, -1.0623e+00, - -7.8789e-01, 7.1887e-01, 3.3685e-01, -8.3765e-01, - 1.8953e-01, -3.1987e-01, -2.7559e-01, -1.3333e+00, - -4.6640e-01, -1.5817e-02, 1.2270e+00, 6.2578e-01, - 1.9285e+00, 8.6265e-01, 3.5437e-01, 3.1125e-01, - 8.3602e-01, -2.4148e-01, 2.0943e-01, -1.4495e+00, - 1.9833e-02, -2.9174e-01, -6.2495e-01, -4.5917e-01, - 9.1781e-01, -3.9357e-01, 
4.5612e-01, -1.5187e-01, - -1.0037e+00, 1.9915e+00, 9.7905e-01, 6.8321e-01, - 5.5759e-01, 1.0224e+00, 1.7313e+00, 3.1153e-01, - -2.3394e-01, 1.6601e+00, -1.7153e+00, -9.5143e-01, - 1.4718e+00, -2.2011e+00, -1.7903e+00, 5.0001e-02, - -8.9850e-01, 5.0424e-01, 1.9616e-01, -1.0849e+00, - 5.8599e-01, 9.0246e-01, -1.3938e+00, 5.9049e-01, - -2.2622e+00, -1.3217e+00, 1.5698e+00, -1.7840e+00, - -3.1063e-01, 2.1382e-01, 1.8234e-01, -1.7920e-01, - 3.1322e-01, -4.3535e-02, -1.1354e+00, -1.4167e+00, - -1.3037e-01, -1.5500e+00, -2.7736e-01, -5.4520e-01, - -1.7234e+00, -5.1656e-01, 3.0241e-01, -8.9100e-01, - -3.6071e-01, -1.2156e+00, -6.2803e-01, -1.0032e+00, - 1.3957e+00, -7.2175e-01, -9.0551e-01, -1.9010e-01, - -3.4329e-01, 3.3023e-01, -3.3813e-01, -8.9870e-01, - 8.4397e-01, 5.0313e-01, 9.6253e-01, -4.4093e-03, - 1.0290e+00, -5.5903e-01, -1.1524e-01, 1.4732e-01, - 3.1780e-01, -3.5264e-01, -1.3937e+00, -4.6558e-01, - -3.7639e-01, -6.7541e-02, 2.7368e+00, 2.3751e-01, - 1.1736e-01, 8.5786e-01, -1.9568e-01, 2.9318e+00, - 1.5217e+00, -4.7240e-01, -1.2094e+00, 2.4469e-01, - -2.0649e-01, -8.8482e-02, 1.3740e+00, -1.4461e+00, - -1.9098e+00, -1.0593e+00, 5.5971e-01, 7.0380e-01, - -1.3894e-01, -3.8972e-01, -8.0615e-01, -1.4300e+00, - -5.9580e-01, -1.4597e+00, 2.9113e+00, 1.5759e+00, - -1.0535e+00, 6.5467e-01, -3.5602e-01, 9.0057e-01, - 8.4816e-01, -3.2017e-01, 8.5226e-01, -9.3748e-02, - 3.7508e-01, 3.2701e-01, 2.1791e-01, 4.9694e-01, - 1.4708e+00, 1.4892e+00, 8.2747e-01, -1.7514e-01, - -1.2019e+00, 7.1862e-01, 3.2286e+00, 6.6439e-01, - -1.2497e+00, -1.9899e+00, 2.2487e-01, 5.6280e-01, - 8.8140e-02, 8.1349e-01, 4.7400e-01, -5.1203e-02, - 3.1416e-01, 5.6526e-01, 6.4709e-02, -3.7952e-01, - 3.5013e-01, -9.1092e-01, 8.7099e-01, -8.2199e-01, - -2.5012e-01, -3.8726e-01, 2.1141e-01, -8.8705e-01, - 9.0666e-01, 9.0707e-01, -9.0952e-01, 7.7533e-01, - -5.5935e-01, 5.2679e-01, -1.2852e+00, -8.1078e-01, - 6.5792e-01, 1.1117e+00, 9.3440e-01, -2.5566e-01, - -3.6771e-01, 1.4378e-01, 2.7861e-01, 1.4836e+00, - 3.5860e-02, -5.3336e-01, -1.3028e+00, 4.0285e-01, - -7.3386e-01, 1.2795e+00, -7.9433e-01, 5.2074e-01, - 6.7295e-01, -1.1792e+00, 1.8395e+00, 7.1889e-01, - 8.0820e-01, 6.6345e-01, -2.6328e-01, 1.2959e+00, - 8.2689e-01, -7.5805e-01, -9.8656e-01, -1.8671e+00, - -3.8755e-01, 1.0553e+00, 1.8972e-01, 2.8496e-01, - -6.2042e-01, 1.2119e+00, 5.2936e-01, -3.4872e-01, - -8.4198e-01, 1.0950e+00, -5.6761e-01, 1.1275e+00, - -1.9346e+00, 9.1152e-01, -1.6136e-01, -2.7374e-02, - -7.8376e-01, 1.2088e-01, 5.3485e-03, -3.4974e-01, - -1.7880e+00, -8.7015e-01, -6.5029e-02, -3.7009e-01, - 1.9724e+00, 4.4227e-01, -8.4003e-01, -6.7618e-01, - 9.1261e-02, -1.3276e-01, -2.3035e-01, 1.1290e-02, - -6.9564e-01, -1.1603e+00, 4.5805e-01, 9.6601e-02, - -7.3241e-01, 2.1452e+00, 4.4845e-02, -6.3071e-01, - 1.5194e+00, 1.6422e+00, -1.5927e-01, 1.9374e-01, - -5.9165e-01, 1.2385e+00, 5.3093e-01, 1.4532e-01, - -9.5996e-01, 1.6440e+00, -3.4936e-01, -3.1856e-01, - 6.1002e-01, 3.3883e-01, -4.1230e-01, -8.0923e-02, - -1.2768e+00, 2.0687e-01, -3.0228e-01, -4.6455e-01, - -1.8378e-01, -2.7539e-01, 1.4673e+00, -1.6464e+00, - 1.8837e-01, -2.7836e+00, -1.4473e-01, 6.6445e-01, - -1.3271e+00, 1.1714e+00, 4.8249e-01, 6.1400e-01, - 5.7929e-01, 6.2860e-01, -1.8804e+00, 9.9885e-01, - 1.3716e+00, -6.0113e-01, -7.1591e-02, -1.3951e+00, - 1.3893e+00, -2.0609e+00, 6.4471e-01, 8.3584e-01, - -2.4999e-01, -7.5756e-01, -9.8447e-01, 1.7764e-01, - 1.8587e+00, -8.5097e-01, -4.7317e-01, 9.8345e-01, - -1.4865e+00, -1.8945e+00, 1.1091e+00, 5.4199e-01, - 1.7728e+00, -1.3859e+00, -3.2971e-01, 
-6.3608e-01, - -3.9891e-01, 1.5369e+00, 8.0486e-01, 7.4837e-01, - 1.4477e+00, -9.0637e-01, -1.7620e-01, -3.7123e-01, - 4.2202e-01, -1.7090e+00, -9.3891e-01, -1.5286e+00, - 5.9977e-01, -7.2453e-01, -1.4468e+00, -3.0955e-01, - 5.3272e-01, 7.5105e-01, 1.2405e+00, -4.4610e-01, - -9.9402e-01, -5.1650e-01, 2.3661e-01, -1.3492e+00, - -3.8439e-01, -1.6052e-01, -8.8132e-01, 4.4870e-02, - -1.3219e+00, -1.4671e+00, -9.5360e-01, 3.4169e-01, - 8.2864e-01, 4.2686e-01, 1.1306e+00, 2.6253e-01, - 1.7010e+00, -1.1043e-01, -2.1514e+00, -8.8870e-02, - -1.7711e-01, 1.4595e-02, 6.3883e-01, 6.2447e-01, - 1.4895e+00, 2.9780e-01, 1.3172e+00, 3.8500e-01, - 9.7148e-02, -1.8670e+00, -1.4755e+00, 1.6142e-01, - -1.5503e+00, 1.6701e+00, -2.2453e-02, 1.6711e+00, - -5.5644e-01, -6.6826e-01, 1.1016e-01, 6.7140e-01, - 1.0101e+00, 1.7037e+00, -5.8961e-01, 1.7551e-01, - -6.4121e-01, 2.6640e+00, -9.6149e-01, 1.1670e-01, - 2.2843e-01, 7.7872e-01, -1.8078e+00, -8.5595e-01, - 2.1645e-01, -6.2604e-02, -1.2987e+00, 1.4111e+00, - -1.2202e+00, 1.0742e+00, 8.3644e-01, -1.8812e+00, - 1.3711e+00, 1.0596e+00, 1.3931e+00, -8.8627e-01, - -1.6493e-01, 8.7206e-02, 1.2282e+00, -3.3145e-01, - 8.6404e-01, -3.4086e-01, 4.9284e-01, -1.5246e+00, - -7.0126e-01, 1.0663e-01, 6.4830e-01, -2.0475e+00, - 9.0912e-02, 1.0638e+00, -9.4488e-02, -4.8261e-01, - -5.3104e-01, -6.3457e-01, 1.2756e-01, 2.0216e+00, - -1.3818e+00, -2.0045e+00, -3.8374e-01, 4.7203e-01, - -3.0470e-01, 6.4869e-02, -4.2947e-01, -5.6360e-01, - 3.5286e-01, -1.0935e-01, 1.6844e-01, 2.1469e+00, - 4.6433e-01, -2.7165e+00, 1.1348e+00, -8.2658e-01, - 5.5486e-01, -1.7174e-01, 2.3152e-01, 9.0074e-01, - -1.3947e+00, -4.4224e-02, -5.4652e-01, 3.3002e-01, - -4.4166e-01, 4.2327e-01, 1.8086e-01, -2.3203e+00, - -8.0115e-01, -1.2778e+00, -1.6463e+00, 1.6532e+00, - 1.1271e+00, 1.0690e+00, -1.2498e+00, 2.9316e-03, - -6.1004e-01, -1.0443e+00, 1.5095e+00, 1.5283e+00, - 8.9064e-01, -4.7600e-01, -7.1406e-01, 3.7699e-01, - 3.9384e-01, 1.0746e-01, -1.0946e-01, 1.2359e+00, - -2.6080e-01, -6.3630e-01, -1.3618e+00, -1.2248e+00, - -1.2964e+00, -7.5963e-02, -3.0056e-01, -4.3527e-01, - -7.5125e-01, 9.5430e-01, 9.1075e-01, -1.1137e+00, - 4.4533e-01, 3.4233e-01, 6.7625e-01, -6.4746e-01, - 2.1675e+00, -1.9298e-01, -6.6236e-01, 1.1338e+00, - 3.9650e-01, 2.9174e-01, -1.1006e+00, -1.6298e-01, - 4.4847e-01, -1.4399e+00, 3.5222e-01, 5.0076e-01, - 4.0610e-01, 5.9205e-01, -8.9339e-01, 5.1416e-01, - -3.4260e-01, -3.6624e-01, -3.8157e-02, 9.0559e-01, - 4.0560e-01, -1.1740e-01, 5.5059e-01, -1.1079e+00, - 7.0034e-01, -7.6837e-01, -1.2853e+00, -1.0331e+00, - -4.0882e-01, 6.0824e-01, -1.8464e+00, -2.4099e-01, - -6.4075e-01, 1.0549e+00, 4.9384e-01, -2.4290e-01, - -1.0860e+00, -1.6913e+00, 7.6897e-01, 1.3570e+00, - -1.2498e-01, 9.2241e-01, -2.6609e-01, -1.1973e+00, - -2.1456e+00, 6.5706e-01, -9.2642e-02, -2.0347e+00, - 1.0098e+00, 2.3194e+00, 9.7993e-01, -1.1933e+00, - 2.2717e-01, 2.0203e-01, 4.2363e-01, -4.2646e-01, - 4.3162e-01, 1.8079e+00, 6.3309e-01, -5.7737e-01, - -1.1832e+00, -2.1535e-01, 9.9373e-01, -4.5180e-01, - 1.0509e+00, 6.9808e-01, -6.4530e-01, -6.5586e-01, - 1.4788e+00, 3.2275e-01, 2.3001e-01, -9.2953e-01, - -8.8853e-01, -2.3816e-01, 1.8101e+00, 9.2623e-01, - 2.5290e-01, -2.3366e+00, -8.2722e-01, 4.2944e-01, - -1.0268e+00, 2.1575e+00, 6.6667e-01, -6.4197e-01, - -1.0847e+00, 3.1721e-03, -3.6088e-01, 2.3492e-01, - -2.0708e-01, -6.6962e-01, -1.1455e-01, -1.0251e+00, - -1.0921e+00, 7.6430e-01, 1.2163e+00, -1.0590e+00, - -4.2576e-01, -2.9291e-01, -5.3577e-03, 7.5468e-01, - -3.2707e-01, -1.3061e-01, 1.3264e+00, -6.0094e-01, 
- 1.1541e+00, 1.8046e+00, 7.2901e-01, 6.4041e-02, - 7.0734e-02, 2.7121e-01, -8.7073e-01, 8.3671e-01, - 6.8418e-02, 1.0251e+00, -2.9172e+00, 9.3302e-01, - -5.2335e-01, -1.0703e+00, -3.4414e-01, -9.0838e-01, - -1.2929e+00, -1.8747e-01, 4.6439e-01, -9.1980e-02, - -1.8848e-01, -5.0008e-02, -9.6477e-01, -7.4197e-01, - -5.1820e-01, -1.5286e+00, -6.9047e-01, -8.1775e-01, - -1.5984e+00, -1.0867e+00, -1.1818e+00, -7.7680e-01, - -1.9211e-01, 1.3454e-01, 7.1829e-01, 2.2465e+00, - 4.7107e-01, -4.6802e-01, -4.7530e-02, 2.0446e+00, - 1.5553e-01, -6.8944e-01, -5.8490e-01, 1.0739e-02, - 1.1213e+00, -2.4216e+00, 4.9005e-02, 2.4587e-02, - -5.9466e-01, 2.7744e-01, -6.1210e-01, 2.3039e+00, - -2.6842e+00, 9.6522e-01, 1.1605e+00, 2.2719e-01, - 1.1363e+00, -7.6551e-02, 8.6680e-01, 1.1114e+00, - 7.1861e-01, 2.1805e-01, -1.2376e+00, 9.5840e-01, - 2.2654e+00, -4.2012e-01, 1.6736e+00, -1.8411e+00, - -3.3262e-01, 1.6666e+00, -1.9059e-01, -4.9915e-01, - 9.7952e-02, -1.1064e+00, -6.5935e-01, -1.5965e-01, - 2.2966e+00, 6.1004e-01, 5.0518e-01, 4.4027e-02, - 2.0766e-01, -2.3053e+00, -1.1459e+00, 4.3782e-01, - -2.1207e-01, -7.5971e-01, -7.2051e-01, 1.0477e+00, - -4.6195e-01, -3.8061e-02, -3.2531e-01, -9.2680e-01, - -4.4580e-01, -3.3090e-01, -2.4550e-01, 1.2880e+00, - -9.4711e-01, 2.5166e-02, -1.4829e+00, 2.0823e-01, - -2.4857e-01, -3.7229e-01, 9.1373e-01, -1.4266e+00, - -1.7952e-01, 1.5041e+00, 2.9063e-01, -6.7028e-01, - -5.3908e-01, 1.9189e+00, 4.5263e-01, 1.6841e+00, - -3.3742e-01, -1.4407e-02, 6.6552e-02, -1.0995e+00, - -1.3763e+00, 7.2223e-01, -1.8425e-01, 8.2180e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.5370, 0.7664, 0.0991, ..., 0.3694, 0.0077, 0.1983]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 6.351012468338013 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '637516', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.738685369491577} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), - col_indices=tensor([3130, 1922, 7438, 7875, 3086, 5037, 9264, 5970, 8835, - 5840, 3371, 717, 2714, 5652, 8308, 3774, 7120, 4154, - 7395, 3929, 6129, 9781, 6261, 6732, 1669, 1667, 633, - 1923, 1906, 8442, 3906, 9988, 4535, 5982, 1715, 1741, - 5414, 8042, 4357, 4431, 4927, 8828, 4505, 6769, 6156, - 1201, 5960, 9322, 3991, 1136, 1328, 2782, 1824, 7316, - 6453, 470, 7627, 3485, 910, 6463, 9140, 9700, 6831, - 3173, 9221, 1971, 5055, 5209, 9297, 8683, 6790, 41, - 9221, 6698, 1242, 8607, 4352, 642, 6934, 8675, 9019, - 7985, 9417, 4336, 905, 1676, 3636, 9517, 1045, 1840, - 8489, 8589, 1855, 2811, 6382, 7962, 4532, 5045, 7314, - 1374, 3659, 3222, 3854, 5093, 9965, 7549, 9206, 601, - 1277, 6577, 7061, 6580, 3150, 5150, 3874, 6227, 5400, - 5706, 4848, 1107, 6344, 8605, 8569, 4536, 4723, 4850, - 3803, 3536, 6933, 7593, 8054, 7100, 5045, 8961, 9146, - 2589, 3789, 9323, 9447, 2958, 4169, 9940, 8937, 7469, - 2367, 4687, 6211, 8108, 3237, 6333, 2756, 4956, 6359, - 2623, 598, 4402, 6995, 3811, 3147, 4341, 7488, 3314, - 2862, 1250, 3652, 8776, 1737, 5267, 2012, 8255, 644, - 6552, 9996, 1096, 646, 3090, 8829, 1988, 520, 727, - 6333, 9199, 9945, 3923, 8562, 7267, 4022, 3189, 8, - 678, 6430, 157, 9858, 9099, 6445, 1514, 642, 9242, - 4022, 5743, 5608, 5614, 4623, 6204, 9627, 8376, 4982, - 5374, 2245, 2529, 6441, 2137, 1983, 1678, 5163, 8143, - 557, 960, 3725, 758, 2859, 9445, 9811, 7921, 3499, - 6296, 6152, 5519, 3505, 9352, 5652, 2197, 8190, 3212, - 1216, 7747, 4668, 5510, 2060, 8155, 8914, 7444, 1814, - 8158, 8433, 9779, 3513, 4760, 5505, 1295, 2403, 5957, - 6077, 628, 7844, 1050, 1238, 9617, 1103, 2247, 2230, - 7929, 7103, 3144, 6082, 9351, 9733, 3548, 20, 241, - 9110, 5000, 1004, 7601, 6549, 5371, 8804, 7961, 2113, - 4889, 3571, 223, 3117, 1772, 2611, 9867, 3974, 2819, - 6718, 1598, 8698, 3496, 7702, 3624, 2809, 8984, 911, - 4323, 8913, 1985, 8789, 9442, 2285, 9793, 1099, 3720, - 8940, 1938, 6886, 5768, 8290, 7546, 7861, 1329, 1883, - 6999, 2372, 4060, 474, 3357, 3085, 6571, 6427, 2155, - 2340, 3141, 6988, 498, 3997, 3663, 4492, 1159, 606, - 6779, 1752, 2650, 9025, 7105, 5573, 1957, 4193, 9064, - 824, 3599, 7673, 9223, 9268, 6844, 6548, 9487, 1234, - 5418, 7958, 422, 2585, 3983, 3427, 505, 9294, 8172, - 4387, 8326, 1560, 8415, 6179, 1068, 6487, 1565, 4653, - 742, 7215, 8965, 2867, 2649, 8180, 8320, 4988, 233, - 2934, 3402, 8976, 8244, 1806, 626, 9081, 619, 6288, - 9226, 5864, 5693, 7876, 5398, 3434, 3465, 2153, 2806, - 9042, 312, 924, 7508, 8794, 9533, 6375, 5092, 8650, - 8362, 2227, 1277, 3650, 4662, 756, 7703, 8367, 9693, - 4427, 3219, 476, 9761, 7078, 2810, 6558, 7270, 1328, - 9505, 7966, 8548, 5282, 3575, 1492, 4407, 777, 6479, - 4274, 3967, 74, 6371, 6578, 1792, 3282, 4502, 2240, - 2330, 4115, 4430, 3331, 3373, 2352, 9254, 412, 92, - 238, 898, 4401, 3488, 9990, 6068, 8788, 8759, 4918, - 7369, 9712, 3545, 8839, 8323, 6754, 3901, 3186, 964, - 6842, 7066, 3074, 6761, 9106, 207, 1240, 5043, 3934, - 9768, 8434, 7523, 162, 6248, 6689, 5017, 6518, 4206, - 9754, 8046, 2335, 1014, 8306, 8491, 3698, 850, 2107, - 5513, 6665, 2252, 5382, 3977, 4175, 8421, 6471, 9701, - 2908, 1398, 6480, 7485, 5276, 5758, 1752, 1497, 4034, - 2666, 2551, 3382, 8399, 8694, 3287, 765, 2314, 8084, - 3212, 8200, 4059, 1093, 4601, 5697, 5142, 2067, 5697, - 9327, 3257, 3210, 1198, 1147, 615, 9844, 3012, 4261, - 4007, 4283, 3098, 6365, 7444, 1505, 745, 1454, 4529, - 1902, 425, 494, 8466, 8738, 252, 8981, 5619, 9212, - 9889, 
7989, 2610, 1442, 8783, 4624, 7324, 7238, 1855, - 5992, 3285, 5688, 3922, 3100, 6781, 3825, 6656, 5548, - 9430, 7104, 4244, 3502, 5647, 9646, 6661, 9232, 6728, - 8561, 8885, 1435, 9965, 803, 3952, 8156, 6644, 8173, - 9304, 1162, 1017, 2797, 6555, 6232, 3524, 3780, 7629, - 9214, 6801, 6395, 3975, 7108, 3353, 7707, 3964, 1386, - 4869, 1555, 4657, 6591, 4293, 8736, 8862, 3049, 3962, - 3373, 6282, 6353, 8051, 7416, 802, 403, 1696, 7963, - 631, 7943, 1166, 8712, 881, 9783, 3276, 2977, 2110, - 5530, 8681, 1886, 1194, 6348, 492, 2643, 6098, 3189, - 5545, 2941, 6974, 6626, 4801, 5979, 1282, 6854, 1363, - 6874, 9031, 8534, 6699, 5859, 9699, 570, 8142, 2540, - 9776, 329, 3897, 6059, 1993, 6797, 7041, 2959, 8866, - 4030, 5469, 8801, 5111, 1454, 2063, 7429, 8449, 9024, - 4017, 1458, 5245, 1192, 1046, 9492, 929, 8753, 1005, - 1100, 5434, 6627, 408, 6543, 857, 4539, 6486, 1847, - 5473, 8179, 3357, 9118, 51, 4965, 2261, 7161, 6448, - 2693, 721, 3806, 4282, 1845, 1375, 7311, 4042, 4536, - 5415, 8324, 7244, 4364, 3096, 6797, 570, 8923, 1777, - 3013, 2101, 9095, 6844, 6497, 3837, 7057, 2492, 6075, - 8384, 2809, 2642, 9500, 3194, 9363, 5206, 2977, 6204, - 4864, 1718, 8209, 8193, 2920, 6937, 1819, 4027, 6297, - 8224, 936, 5942, 9497, 5373, 1321, 8242, 462, 385, - 2613, 9094, 1820, 7834, 7282, 5535, 6931, 8328, 5415, - 5761, 8080, 8402, 7597, 1652, 6788, 695, 7881, 9360, - 8894, 7062, 6139, 3143, 8934, 7087, 2667, 9450, 7212, - 1140, 1864, 9805, 8772, 2904, 6237, 8763, 9925, 2509, - 2863, 8060, 1117, 5275, 1318, 8002, 9262, 225, 5149, - 8116, 2517, 6426, 2516, 6552, 7344, 1461, 2875, 2861, - 9754, 82, 6910, 4086, 6987, 985, 4095, 7393, 5632, - 4679, 5986, 614, 1757, 9629, 4103, 9044, 9586, 7995, - 2818, 2092, 283, 280, 3399, 5549, 8578, 2257, 8268, - 3241, 182, 3580, 498, 5262, 8936, 2139, 3966, 7211, - 8490, 4602, 5000, 9020, 3234, 7845, 5853, 9307, 5018, - 630, 6995, 8335, 3459, 5985, 8304, 9687, 7861, 783, - 9893, 3603, 3240, 8318, 6813, 9155, 4891, 1003, 4722, - 3638, 9067, 1540, 2485, 5783, 3992, 1254, 6579, 4476, - 1387, 5667, 9272, 2271, 8091, 9127, 7521, 6143, 1078, - 1061, 6708, 1844, 1096, 7640, 2821, 7847, 3397, 6996, - 4980, 3438, 2364, 5795, 2372, 3966, 4575, 8554, 4438, - 5927, 5852, 8043, 8929, 6065, 6066, 4066, 8202, 427, - 9820, 781, 6280, 6061, 120, 4704, 7362, 9469, 3206, - 8289, 5142, 182, 5145, 5164, 6546, 668, 2118, 1558, - 8790, 8352, 5258, 2245, 6764, 6507, 3235, 2045, 3251, - 7352, 968, 5854, 2015, 1867, 8402, 6811, 3582, 2419, - 9722, 3156, 465, 4204, 5518, 3826, 2057, 3282, 3589, - 9888, 9283, 7690, 3472, 2381, 3540, 4001, 8537, 111, - 9524, 4977, 9663, 6668, 1810, 4958, 4561, 6927, 1905, - 8565, 9006, 481, 931, 6535, 7043, 767, 1383, 6877, - 8815]), - values=tensor([ 2.4712e+00, -6.4098e-01, 4.4048e-01, 1.4810e+00, - 4.8869e-01, -1.8595e+00, -1.0513e+00, -1.2328e+00, - 8.0728e-01, -7.1168e-01, -7.3367e-01, -4.5937e-01, - 5.0030e-01, -1.9136e+00, 1.3607e+00, -8.8982e-02, - -6.2274e-01, 7.0586e-01, 6.7771e-01, 1.3276e+00, - 1.2163e+00, 2.2961e-01, 2.8238e-01, 3.1667e-01, - -7.2076e-02, 1.7135e-01, 2.6511e-02, 1.3436e-01, - 7.0196e-01, -1.0026e+00, -5.4744e-02, 3.8156e-01, - 8.9809e-01, 6.2842e-01, 4.4123e-01, -6.8313e-01, - 6.2525e-01, -1.5225e+00, -4.8870e-01, -6.4525e-01, - -1.5181e-02, -1.0698e+00, -2.2069e+00, -3.2462e-01, - -5.2399e-01, -2.6237e-01, -6.6100e-01, -1.3051e+00, - 3.0759e-01, 1.8007e+00, -4.3808e-01, 1.6099e-01, - 2.0257e+00, -5.2075e-01, -1.5016e+00, -1.0643e+00, - -5.0268e-01, -1.1201e+00, 4.4091e-01, 6.9874e-01, - 6.0069e-01, -9.7202e-01, -1.0245e+00, 9.6365e-01, 
- -7.2281e-01, -1.3316e+00, -1.5695e+00, 2.8770e-01, - 7.8357e-02, 4.7207e-02, -2.5283e-01, 6.3713e-01, - 3.7426e-01, 6.4841e-01, -2.0078e-01, 4.9545e-01, - -5.7277e-01, 2.9568e-01, 1.3064e-01, -8.9380e-01, - -9.9804e-01, -1.3303e+00, 1.9828e+00, 6.7975e-02, - -9.0309e-01, 3.0519e-01, -3.3351e-01, 1.5711e+00, - 1.3832e+00, -1.1505e+00, -1.8647e+00, -2.0668e+00, - 4.4417e-01, -3.2940e-01, 2.9575e-01, -2.4232e+00, - 7.0361e-01, -1.8424e+00, -3.2003e-01, 7.1938e-01, - 7.0064e-01, 1.9673e+00, 1.6560e+00, -6.8088e-01, - -9.3720e-01, 8.3050e-01, 5.4436e-01, 2.1164e-01, - -5.2098e-01, 5.7315e-01, 1.3454e+00, 2.0557e+00, - -5.8521e-01, 7.8951e-01, -9.3431e-01, -9.6977e-01, - -1.2330e+00, 9.2931e-01, 4.1129e-01, -3.9929e-01, - 1.1771e+00, 2.1830e-01, 1.7306e+00, 1.1278e-01, - 2.0103e+00, -8.5711e-02, 4.7722e-01, -4.8625e-01, - 1.4676e+00, -1.6535e+00, 2.8689e-01, -5.4858e-01, - -1.6913e+00, 5.4138e-01, 4.5252e-01, -4.6254e-01, - -2.4329e-01, -9.4123e-01, 2.4815e+00, -8.1358e-01, - 3.6697e-01, 7.6976e-01, -6.4166e-01, 1.2594e+00, - -5.7886e-01, 1.0588e+00, -9.4899e-01, 1.7277e+00, - -1.5085e+00, -4.2221e-01, -4.9541e-01, 1.1746e+00, - 1.0312e+00, -1.0309e+00, -1.5655e+00, -3.5219e-02, - 1.2542e+00, 2.4653e-01, -1.2863e-01, -4.3454e-01, - -3.0405e-01, 1.2856e+00, -1.7373e+00, 3.8921e-01, - -2.2565e-01, -9.0908e-01, 5.3909e-01, -9.4975e-02, - 2.0384e+00, -2.2562e-01, -4.5708e-01, 9.8855e-01, - -1.3495e+00, 2.2299e+00, -1.2221e+00, 6.8765e-01, - -1.5204e+00, 1.9466e+00, 8.5291e-01, 7.0943e-01, - -1.1666e+00, -5.8849e-01, -6.0008e-01, 4.0837e-01, - -6.3923e-01, -1.6287e+00, -1.5219e+00, 2.8573e-01, - -1.3902e+00, 7.8562e-01, -1.1192e+00, 1.0627e+00, - 1.0131e-01, -1.5853e+00, 1.0574e+00, -3.5736e-01, - -4.6132e-01, -7.0015e-01, 9.5982e-01, 4.7795e-02, - -1.2615e+00, -3.2005e-01, 7.4614e-02, -1.3504e+00, - -4.2363e-01, -3.2568e-01, 5.0508e-01, 1.6635e-01, - 5.3170e-01, 1.3869e-01, 5.4297e-01, 2.5915e+00, - -3.6247e-01, -3.8603e-01, 4.2741e-02, -1.9745e-02, - -8.4106e-01, -5.1496e-01, 8.4200e-01, 9.2009e-01, - -4.3447e-01, -4.4656e-01, -7.4869e-01, -1.9892e-01, - -1.1809e+00, 3.9687e-01, -1.3300e-01, 2.6714e-01, - -1.5536e+00, 8.3710e-01, 7.3313e-01, 7.4085e-01, - 5.3501e-02, 5.5349e-01, -6.0544e-01, -7.3774e-02, - -5.9487e-01, -1.4766e-02, -4.8619e-01, -9.7440e-02, - -1.3678e+00, -5.4991e-01, 1.0208e-01, -2.6935e-02, - -1.3142e-02, -1.4970e+00, 6.0807e-01, -7.7832e-02, - 3.1206e-01, 1.2336e+00, 2.1437e-01, -8.8273e-01, - -9.2184e-01, -1.3869e+00, 2.5448e+00, 1.4575e+00, - -1.5853e+00, 5.3927e-01, 6.1590e-02, 2.8977e-01, - -6.9079e-01, -2.0450e+00, 6.5284e-01, 6.5257e-02, - 9.2827e-01, -1.5449e+00, -8.4170e-01, 8.2771e-01, - 8.9689e-01, -5.3467e-01, -7.9990e-01, 8.2120e-01, - 3.1034e-01, -9.9210e-01, 1.6722e+00, 7.3070e-01, - 6.5198e-01, -2.8972e-01, -1.6317e+00, -1.4142e+00, - 1.8077e+00, 1.9173e+00, -5.5331e-01, -9.8067e-01, - 1.6521e+00, -1.0636e+00, 1.1497e+00, 1.0441e+00, - 8.0579e-01, -9.4581e-03, 2.2431e-01, 1.5383e+00, - -4.0404e-01, 7.5166e-01, -4.4070e-02, -2.7429e+00, - -1.1255e+00, 5.7058e-01, -3.7568e-01, 6.1401e-01, - 1.6696e+00, -4.3788e-01, -5.7532e-01, 1.2120e-01, - 1.6741e+00, -1.2964e+00, 9.4671e-01, -1.2504e+00, - -9.5145e-01, 2.3841e-01, 1.4023e+00, -2.8416e-01, - -1.0529e+00, -1.0413e+00, -3.2610e-01, 1.5060e+00, - -7.9708e-01, -5.5437e-01, 1.9549e-01, 2.3724e-01, - -4.4063e-01, 1.7634e+00, -1.0763e+00, -1.0253e+00, - -1.3996e+00, 1.3269e+00, 1.8984e-01, 2.8741e-01, - -3.2722e-01, 1.1737e+00, -2.9016e-02, -1.0685e+00, - 1.5156e+00, -4.3051e-01, 1.5275e+00, -9.7233e-02, - -4.5925e-01, 
-1.9608e+00, 2.9273e-02, -1.4703e+00, - 8.3907e-01, 1.0942e+00, -7.1234e-03, 1.9212e+00, - -1.0652e+00, -7.5513e-01, -6.5676e-01, 1.1408e+00, - 1.1342e+00, -1.7994e+00, 1.6630e+00, 1.0688e-02, - -1.9285e+00, -1.0825e+00, -7.1971e-01, 1.0952e+00, - 2.6340e-01, 3.5020e-01, 5.6364e-01, -4.9212e-01, - -1.1515e+00, -5.0104e-02, -4.9781e-01, 8.6750e-01, - -5.3337e-01, -5.0099e-01, -1.2522e+00, -3.1868e-01, - 8.8054e-01, 1.0559e+00, 1.0622e+00, 2.9444e-01, - 1.5943e+00, 5.3798e-01, -7.6036e-02, 7.3429e-01, - 2.3313e-01, -1.9652e-01, -1.1196e+00, 1.5648e-01, - -6.1555e-01, -8.7884e-01, -1.9483e+00, -1.0436e+00, - -1.1113e-02, -7.8999e-01, 6.7561e-01, 6.8887e-01, - 7.9433e-01, -4.9113e-01, -9.9494e-01, -2.3743e-01, - 1.2233e+00, 1.2394e+00, 1.1544e+00, 5.3015e-01, - -6.6310e-01, -2.1022e+00, 6.7944e-01, -1.3442e+00, - 2.6413e+00, 4.9828e-01, -1.3207e+00, -2.7721e-01, - -1.6881e+00, 5.7162e-01, 1.8421e+00, 1.2452e+00, - 2.3082e-01, -1.7195e+00, 3.6932e-01, -1.2430e+00, - 2.5182e-01, -1.0486e+00, -1.7430e-01, -1.0787e+00, - 1.9582e-01, 6.3772e-02, -1.3365e+00, -1.5584e+00, - -8.7071e-01, 2.4311e+00, -1.6650e-01, -1.0427e+00, - -3.5390e-01, 1.6192e+00, 8.4918e-01, 2.9921e-01, - -4.9153e-01, -6.2310e-01, -1.1641e+00, -4.2993e-01, - -2.6500e-01, 8.2108e-01, 1.9448e-02, -2.0454e-01, - 9.8798e-01, 1.2623e+00, 4.4411e-01, -5.5065e-03, - -5.0869e-01, -5.6785e-02, 3.8071e-01, -1.3636e+00, - 2.0685e-01, -1.3265e+00, -1.1809e-01, -9.7371e-01, - 8.2560e-01, 1.3043e+00, 4.0596e-01, -1.6750e+00, - 4.5633e-01, -3.7142e-01, -4.8180e-01, 9.3576e-01, - 5.8707e-01, -7.3281e-01, -1.4056e-01, 4.5214e-01, - 7.8143e-01, -1.5889e+00, -6.3356e-01, -4.6267e-01, - -4.4394e-01, -2.4200e-01, 8.1336e-01, 1.3457e+00, - -1.0474e+00, 1.3604e-01, 4.5314e-01, 1.9639e+00, - -2.9887e-01, -1.2509e-01, -2.8481e+00, 1.3852e+00, - -1.5330e+00, 2.2496e-01, -3.2133e-01, -9.3001e-01, - 5.8996e-01, -1.8346e+00, -9.8172e-01, 3.0751e-01, - 1.5001e+00, -2.3845e-01, -4.1641e-01, -5.2730e-01, - -1.1682e+00, 6.1244e-01, -1.0406e+00, -3.3326e-01, - 4.5010e-01, 1.4518e+00, 3.1171e-01, 7.0912e-01, - -8.1887e-01, 3.4394e-01, -1.2615e+00, 1.0727e+00, - -3.2982e-01, 2.8449e-01, 7.4276e-01, 9.2551e-01, - -3.6329e-01, 1.7358e+00, 1.1442e+00, 1.9588e-01, - 8.4997e-01, 9.9600e-01, 1.8354e-01, 4.3248e-01, - 7.7791e-01, 1.8930e+00, -4.8117e-01, 3.6172e-01, - -1.4445e+00, 9.4332e-01, 1.6896e+00, 4.2181e-02, - 1.0182e+00, -6.3392e-01, 9.7368e-02, 2.4400e-01, - -1.3101e+00, 3.8381e-01, 1.6755e-01, -6.0364e-01, - 4.6198e-01, 1.5887e+00, 1.6784e+00, 1.4332e+00, - 1.0238e+00, 3.5913e-01, -7.7011e-02, -1.0906e+00, - -3.9080e-01, -7.2640e-02, -1.7419e-01, 1.2550e+00, - 1.4594e+00, 5.0837e-01, 3.3610e-02, 2.8448e-01, - -3.6622e-01, -8.8080e-03, 8.6157e-01, 1.3831e+00, - 5.5145e-01, -1.9441e+00, 4.6470e-01, -2.1288e+00, - 2.7687e-01, 1.2155e-01, -1.0247e-03, -1.3933e-01, - -3.2987e-01, -9.1203e-01, 1.4422e+00, 2.2436e-01, - -2.2773e+00, -7.2998e-01, 2.5384e-02, 8.5800e-01, - 1.1809e+00, -5.9789e-01, -3.7839e-01, -5.7305e-01, - -2.3773e-01, -5.9265e-01, -6.5035e-01, 2.8618e-01, - -1.6506e+00, 2.0045e+00, -1.7390e+00, -1.6546e+00, - -1.5979e+00, 2.9671e-02, -2.9530e-01, 3.3292e-01, - 1.2369e+00, -4.6076e-01, 9.3303e-01, -1.1991e+00, - 2.8110e-01, 4.4868e-01, 1.8177e+00, -3.2970e-01, - 1.1572e+00, -9.4671e-01, -4.0671e-01, 7.4484e-01, - -6.3842e-01, -5.7869e-01, -1.5955e+00, 8.4376e-01, - -6.4377e-01, 9.0753e-01, -1.2187e+00, -6.1436e-01, - -4.5399e-01, 3.2936e-01, -5.9578e-01, -1.1948e-01, - 6.8560e-01, -8.1882e-01, 7.9382e-01, -1.3061e+00, - -1.3683e-02, -8.1025e-01, 
6.7851e-01, -1.2531e-01, - -1.7121e+00, 1.5307e-01, 5.5526e-01, 1.4884e+00, - 8.4967e-01, 1.4167e-01, 2.6376e-01, 8.7384e-01, - 2.8490e-01, 1.0994e+00, 1.4465e+00, 1.8766e-01, - 1.6252e+00, 7.6013e-01, 3.3599e-01, -2.5042e-01, - 3.0976e-01, 4.7959e-01, 4.7351e-01, -7.1952e-01, - 1.1909e+00, -3.0414e-01, 7.5864e-01, -6.1217e-01, - 1.1702e+00, -2.7692e-01, 7.3461e-01, -7.2649e-01, - 7.4860e-01, 1.7258e+00, 1.8756e+00, 4.2551e-01, - 4.4205e-01, -6.2296e-01, 7.9423e-01, 8.2377e-01, - -5.3456e-01, 9.7277e-01, 6.3597e-01, 1.3035e+00, - -1.5802e-01, -8.8411e-01, 1.0933e+00, 1.3703e+00, - -3.6846e-01, 6.6938e-01, -2.0227e+00, -1.1620e+00, - 1.8029e-01, 1.6290e-02, 8.0863e-01, -4.6834e-01, - 1.0121e+00, 5.2475e-01, -4.5960e-02, 2.2648e-01, - -4.8070e-01, -1.9462e+00, -7.2573e-01, 2.7083e-01, - 1.2443e+00, -8.8493e-01, -1.6698e+00, -7.1613e-01, - -3.7336e-02, 4.6224e-01, 1.4344e+00, -1.6103e-01, - 8.2406e-01, -1.3810e+00, 3.0782e-02, -4.8786e-01, - 1.0655e+00, 4.3697e-01, -2.4675e+00, -5.8329e-01, - -6.7116e-01, -1.0279e+00, 2.2983e+00, 1.1274e+00, - 1.1685e+00, 9.5959e-01, 5.9881e-01, 2.9078e-01, - -7.6136e-01, 9.2768e-01, 7.2822e-01, -4.2507e-01, - -4.9736e-01, 5.3652e-01, -1.3584e+00, -1.1629e+00, - 1.0075e+00, -1.2471e+00, 2.2295e-01, 1.4690e+00, - 7.5613e-01, 1.2340e+00, -7.4186e-01, -5.5422e-01, - 1.0551e+00, -7.1772e-01, 1.5901e+00, 2.4792e+00, - -3.1318e-01, -1.1446e+00, 1.1604e+00, -7.5626e-01, - -1.4388e+00, 1.1877e+00, 2.0158e+00, 1.3585e+00, - 1.1755e+00, -9.2301e-01, 2.2368e-01, 1.2354e+00, - -6.6059e-01, 8.8082e-01, -8.9672e-01, -4.1948e-01, - -4.5202e-03, -7.1478e-01, -6.1154e-02, -9.6628e-01, - 2.1935e-01, 1.0811e+00, 4.6276e-01, -7.3262e-01, - 3.0826e-01, 1.5945e-01, 4.0907e-01, 7.0263e-01, - 2.4238e+00, 2.2162e+00, -1.3960e-01, 1.3631e+00, - 1.9286e+00, 8.4284e-01, 1.3901e+00, -3.6878e-01, - -3.1503e-01, 7.5366e-01, -1.4212e+00, 4.4236e-01, - 1.4111e+00, -3.2007e-03, -1.4246e+00, -1.6058e-01, - -5.2819e-02, -2.1511e-01, -9.8376e-02, -6.5363e-01, - 9.3586e-01, 1.0379e+00, -4.7491e-01, 1.3771e+00, - 3.4811e-01, 1.6458e+00, -2.8714e-01, -3.2084e-02, - -1.9168e+00, 6.2708e-02, 1.2228e+00, 2.4805e-01, - -3.3214e+00, 7.6252e-01, 6.7887e-01, -2.3078e+00, - 7.9494e-01, 9.3763e-01, -2.6393e-01, -3.3019e-01, - -1.1430e+00, 6.8143e-01, -1.6936e+00, 5.0938e-01, - 2.2854e-01, -4.5127e-01, 6.3886e-04, 2.0009e-01, - 9.7061e-01, 1.9199e+00, -9.3215e-01, -6.5949e-01, - -2.6938e+00, 2.8376e-01, 3.9102e-01, 1.9911e-01, - 7.7861e-01, -3.2603e-01, -3.6420e-01, -3.4816e-01, - 4.7383e-01, 3.6403e-01, -1.7137e+00, -8.0807e-01, - 1.4281e+00, -5.1182e-01, 1.5750e+00, 5.1733e-01, - 1.0996e+00, 8.5076e-01, 5.4253e-02, -5.0249e-01, - 3.3400e-01, 1.4432e+00, -1.4940e+00, 9.8811e-01, - -1.0876e+00, 9.6801e-01, 6.1199e-01, -6.7482e-01, - -8.6018e-01, -6.7555e-01, -3.9019e-01, 3.2585e+00, - 3.3328e-01, 4.4020e-01, 6.6161e-02, 1.4198e+00, - -5.7160e-01, 9.2953e-02, 2.1219e+00, -8.2412e-01, - -1.5081e+00, 4.8482e-01, -4.0900e-01, 1.5407e+00, - 2.5504e+00, -2.0030e-01, -1.2086e-01, -8.3207e-01, - 6.4657e-01, 1.6895e+00, 4.6608e-01, 1.7506e+00, - 1.5084e+00, 3.5577e-01, -1.2677e+00, 4.9242e-01, - -7.0530e-02, -1.1617e+00, -1.9616e+00, 6.8584e-01, - -2.4256e-01, 1.2177e+00, -5.0913e-01, 8.2890e-01, - 5.1893e-01, 4.6789e-01, -6.5076e-02, -4.8593e-01, - 2.4553e-01, -1.5214e+00, -1.5293e-02, -2.3693e-01, - -9.2726e-01, 6.1040e-01, 1.2202e+00, 5.0287e-01, - 3.4577e-01, 1.0492e+00, 4.9076e-01, 1.4477e+00, - 2.7206e-01, 5.9261e-01, -3.4217e-01, 6.0118e-01, - 1.5218e+00, -2.0196e+00, 1.3196e+00, -3.7228e-01, - -6.2718e-01, 
1.2697e+00, 1.1102e-01, -9.8526e-01, - -1.6644e+00, -1.9469e+00, -6.8693e-01, 1.5351e+00, - 8.3146e-01, 5.0908e-01, -1.1142e+00, -5.4803e-01, - -2.4427e+00, 1.0938e-01, 1.3539e+00, -1.3717e+00, - -4.4884e-01, -2.0902e-01, -7.7258e-01, 2.4147e+00, - -8.6735e-01, -9.7576e-01, 7.8534e-01, 8.4711e-01, - 7.0130e-01, 2.6546e-01, -5.0232e-01, -1.1689e+00, - -4.0296e-01, -7.0829e-01, -2.8043e+00, 2.6795e-01, - -9.1730e-01, 1.0188e+00, 7.6544e-01, -4.6970e-01, - -1.3518e+00, -8.5537e-01, -4.3790e-01, 3.6874e-01, - -1.2578e+00, 1.4816e+00, 2.4759e+00, -1.1315e+00, - -1.8052e+00, 6.5233e-01, -1.0845e+00, -6.1564e-01, - 1.3101e+00, -4.3988e-01, -8.4167e-03, -9.2793e-01, - 3.9496e-01, 1.9285e-01, 1.6976e-01, -5.5464e-01, - 1.0142e+00, 7.3243e-01, -4.2201e-01, 2.5982e-01, - 4.6326e-01, 5.8530e-01, 7.2924e-01, -4.9691e-01, - -5.7950e-02, -1.4176e-01, 1.0483e+00, 2.5740e-01, - -5.7391e-01, 6.1557e-01, -1.2073e-02, 2.0381e-01, - 1.9566e+00, 8.5407e-01, 4.8386e-01, 4.7403e-01, - -1.7225e+00, -2.0846e+00, 1.4562e+00, -1.3607e+00, - 3.4557e-02, 1.1720e+00, 9.9995e-01, 1.3034e+00, - 3.4822e+00, -5.3608e-01, -3.4852e-01, -6.6019e-01, - 2.1527e+00, -4.1947e-01, 1.0823e+00, 1.1652e+00, - -1.4845e-01, 5.6005e-01, 9.7299e-01, -4.4649e-01, - -1.4385e-01, 4.5945e-02, -1.4481e+00, 5.0917e-01, - -1.1516e+00, 1.5270e-01, -1.3479e+00, 3.2473e-01, - 1.7103e+00, 1.5384e-01, -1.5823e-01, 9.1598e-01, - -9.9592e-01, -3.8750e-01, 7.6564e-01, -3.5067e-01, - 3.4365e-01, 2.3028e+00, 1.2591e+00, -2.5634e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7889, 0.1723, 0.2587, ..., 0.4333, 0.6767, 0.9006]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 9.738685369491577 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '687353', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.203821897506714} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([2208, 3194, 8248, 3979, 1910, 9958, 4160, 5101, 1098, - 4689, 1389, 216, 9069, 9298, 185, 6867, 8825, 7295, - 248, 1982, 7519, 3151, 8106, 6490, 2709, 9743, 8136, - 7724, 1737, 8062, 4239, 6637, 9264, 8967, 1878, 4772, - 48, 8040, 9656, 4405, 9729, 9982, 2578, 4456, 4886, - 4774, 7108, 3443, 9402, 1294, 3094, 1454, 5497, 6773, - 2294, 1277, 4630, 9296, 8967, 4267, 7695, 3980, 1095, - 1221, 637, 3060, 3550, 9711, 6262, 1658, 1379, 4171, - 3084, 8609, 3496, 9519, 2372, 4850, 6099, 952, 2017, - 369, 1506, 5137, 9071, 8716, 5147, 2857, 7922, 3186, - 4695, 626, 8660, 9242, 2235, 1183, 8806, 932, 4385, - 3155, 4040, 7394, 624, 4475, 66, 9928, 4116, 3136, - 4742, 3197, 1343, 7371, 3392, 7171, 8058, 5365, 8129, - 2239, 7841, 8866, 6688, 2584, 5361, 6865, 4729, 7267, - 401, 7285, 1041, 9963, 2289, 9480, 4371, 3365, 2996, - 9948, 678, 4846, 5671, 8496, 4930, 8967, 5398, 7328, - 1157, 7297, 438, 8117, 1132, 7340, 2399, 9875, 6873, - 9915, 5662, 5933, 8864, 3423, 92, 2134, 8437, 4101, - 1420, 981, 6130, 9037, 1916, 9321, 8375, 4477, 9451, - 6272, 6752, 7213, 7364, 4384, 7944, 5471, 7834, 7570, - 7801, 9873, 6614, 9124, 7238, 2320, 3083, 5487, 9031, - 2127, 3154, 1113, 46, 8176, 1259, 6169, 6862, 2167, - 7492, 3296, 1369, 4598, 6361, 6252, 6681, 993, 2030, - 6995, 9891, 7557, 9719, 9648, 9095, 848, 320, 2282, - 7430, 8877, 5977, 535, 9089, 1066, 2895, 3887, 4816, - 1033, 7669, 549, 924, 8881, 2214, 6800, 4932, 56, - 3237, 7774, 1913, 3598, 2989, 6041, 8295, 9160, 1725, - 7971, 3212, 632, 9443, 2230, 1262, 6050, 2208, 6464, - 4919, 7751, 9812, 6889, 7381, 3729, 7186, 9396, 524, - 7639, 5642, 7733, 6997, 1074, 7017, 1313, 4325, 3205, - 879, 4606, 4689, 1779, 4208, 9113, 7675, 8760, 6771, - 3460, 9972, 4984, 7983, 7627, 2723, 5596, 4888, 7474, - 6154, 1959, 2273, 2394, 1280, 5429, 8333, 9579, 7404, - 1787, 7037, 7873, 4895, 9239, 8778, 4930, 7959, 8562, - 7379, 2295, 951, 188, 7230, 5725, 9582, 8405, 8091, - 4710, 89, 2949, 9864, 4741, 8126, 8559, 9819, 9527, - 7038, 6840, 8694, 8820, 2265, 9498, 2938, 3267, 3228, - 4991, 1683, 9595, 8278, 7615, 3457, 6839, 4010, 5664, - 4367, 8856, 4814, 9753, 38, 7270, 4811, 9984, 3648, - 9130, 3723, 1688, 5153, 3428, 5634, 2891, 5195, 9932, - 7360, 5054, 8991, 2581, 1892, 3516, 9457, 3319, 4945, - 2115, 3928, 9313, 2756, 4889, 6240, 9780, 1908, 5270, - 394, 4553, 2311, 1266, 9862, 5030, 4876, 2581, 4550, - 2190, 4429, 5809, 2268, 1610, 3905, 148, 6953, 924, - 8699, 9622, 9696, 3792, 5982, 40, 7060, 41, 9634, - 1292, 3305, 5649, 5763, 7350, 324, 1548, 2000, 2436, - 3866, 8918, 3037, 8728, 3725, 238, 8097, 151, 1414, - 5764, 9386, 6314, 9575, 6880, 2895, 8514, 2803, 1629, - 2746, 4827, 3247, 742, 658, 6224, 432, 8749, 5564, - 9242, 2793, 3426, 8446, 1503, 5804, 9863, 8350, 9314, - 8057, 9487, 7203, 394, 9341, 6125, 9084, 914, 8174, - 3692, 4067, 452, 5670, 2463, 9994, 5034, 4639, 4426, - 3028, 8923, 6235, 7044, 8944, 5047, 3643, 1068, 9271, - 429, 3446, 4374, 4501, 1404, 2721, 1836, 3923, 3380, - 1911, 4592, 6168, 2885, 8263, 1641, 3582, 5076, 2311, - 5577, 244, 939, 1882, 6202, 3802, 772, 6651, 5253, - 7301, 357, 6380, 5247, 7708, 6681, 4518, 2496, 4099, - 4066, 3418, 6699, 5400, 9234, 8546, 6682, 529, 9369, - 2987, 7509, 6754, 2055, 477, 4941, 7632, 8057, 8301, - 613, 6444, 5412, 4809, 4276, 8875, 2274, 2526, 8254, - 2388, 305, 702, 6597, 3621, 4288, 8379, 2926, 4511, - 8059, 4379, 6966, 1729, 8893, 3701, 5876, 9912, 6137, - 9395, 
6718, 12, 7188, 8892, 7285, 7886, 9741, 3208, - 2802, 1030, 62, 1095, 8623, 6072, 5284, 6464, 9687, - 1094, 535, 926, 7311, 8257, 1419, 167, 6074, 1931, - 4764, 7192, 9274, 7634, 2483, 6012, 3899, 5649, 9691, - 7452, 5506, 968, 3421, 6043, 1105, 6492, 6614, 7659, - 7369, 9932, 2401, 7707, 8472, 2177, 1220, 8621, 8570, - 9501, 7596, 231, 1903, 2848, 6692, 2362, 7807, 6347, - 4233, 4760, 6451, 4797, 5227, 4788, 8565, 6443, 3220, - 2838, 8496, 3346, 2759, 3169, 2689, 875, 3230, 7031, - 1848, 4346, 4930, 5156, 4848, 8104, 7338, 5234, 3697, - 310, 3377, 5946, 3672, 6047, 6909, 2990, 2807, 1086, - 4119, 2587, 7610, 1464, 5603, 249, 992, 9947, 7245, - 2199, 9486, 142, 9477, 5618, 1048, 402, 536, 8859, - 5992, 8496, 5394, 8118, 5429, 8950, 1563, 8214, 8794, - 5860, 9039, 313, 7718, 288, 5871, 1599, 8984, 7927, - 4119, 2843, 3593, 6356, 8538, 3994, 8591, 1186, 6555, - 1721, 8324, 4031, 253, 1749, 7806, 4804, 9764, 8893, - 3026, 5156, 5059, 7467, 7763, 7906, 3723, 3782, 7972, - 1420, 645, 5500, 6047, 9574, 1332, 9156, 1999, 5586, - 3109, 4760, 8008, 6780, 1642, 8528, 3595, 3025, 5519, - 7529, 4889, 1908, 9879, 9432, 5864, 7831, 3326, 1104, - 2169, 6553, 4204, 4348, 2019, 5991, 1695, 1177, 1073, - 859, 9148, 6821, 4526, 2493, 9206, 557, 5595, 1189, - 4508, 2706, 9501, 3588, 471, 381, 6819, 8084, 1817, - 8147, 2644, 8640, 4697, 1737, 8092, 5516, 6529, 3287, - 1645, 8887, 2460, 4183, 5417, 5111, 5017, 8695, 8024, - 5285, 6603, 985, 597, 8032, 4668, 6871, 3692, 2839, - 4231, 561, 8660, 5006, 9, 916, 1060, 8930, 4257, - 7579, 4817, 5474, 4207, 2757, 3510, 1941, 8636, 1179, - 2151, 8095, 3235, 766, 1260, 6339, 6277, 488, 2348, - 1210, 8048, 5679, 8626, 1268, 8069, 5943, 9788, 5634, - 1091, 3379, 3795, 1476, 1654, 6181, 6630, 3605, 7612, - 2990, 2897, 5067, 842, 878, 935, 309, 2204, 9485, - 5045, 5595, 5585, 1297, 5235, 9568, 4817, 6971, 5792, - 9259, 3608, 5416, 9064, 7565, 7555, 7624, 3935, 8727, - 74, 5618, 9130, 7627, 9097, 6915, 9562, 1267, 7390, - 1677, 80, 6217, 1332, 5889, 8151, 9367, 2660, 1184, - 4705, 8479, 7752, 540, 7355, 1929, 229, 1169, 1692, - 2305, 1898, 989, 124, 7008, 9895, 9600, 7029, 8002, - 5903, 9963, 3552, 209, 2599, 2861, 2481, 4248, 9754, - 5104, 6295, 6099, 6834, 4208, 9819, 1575, 1241, 3074, - 4555, 9011, 4580, 9323, 3594, 2345, 8720, 7479, 9072, - 1172, 4184, 1297, 429, 2488, 4842, 6597, 6755, 1670, - 5836, 2189, 2447, 8312, 4063, 7930, 1254, 9770, 7266, - 7271, 5888, 391, 403, 6522, 89, 5853, 2941, 5692, - 682, 6138, 8149, 9585, 3403, 9265, 4158, 5925, 8349, - 2805, 8441, 2969, 7641, 9466, 4587, 9433, 1576, 7770, - 1063, 5949, 5235, 3335, 9027, 2720, 4728, 4487, 1732, - 3427, 6100, 8523, 8786, 2976, 253, 1176, 8327, 4130, - 8220]), - values=tensor([-1.8869e+00, -1.0874e+00, -9.8337e-01, 4.5128e-01, - -1.3899e-01, -1.4085e+00, 1.8319e+00, -2.9824e-01, - -2.9007e-01, -1.0830e+00, 1.8296e+00, 7.0347e-01, - 8.0532e-01, -1.3704e+00, 1.0011e+00, -1.5591e+00, - -2.2775e-01, -1.7077e-01, 4.9351e-01, 1.6862e+00, - -3.5932e-01, 5.9849e-01, 6.8732e-02, 1.4060e+00, - 2.0382e+00, -5.8500e-01, -4.1688e-02, 8.3254e-01, - 5.0043e-01, -2.2479e+00, 9.0466e-01, 9.0263e-02, - 5.5254e-01, 1.3914e+00, 9.7754e-01, -1.0533e+00, - 1.5358e+00, -1.2251e+00, 3.2124e-01, -4.0089e-01, - 5.1726e-01, 3.6379e-01, -8.9350e-01, -6.8440e-01, - 9.8756e-01, -1.7394e+00, -4.8734e-01, -7.1909e-01, - 1.8058e+00, 6.8939e-01, -4.5703e-01, 3.6393e-01, - 5.7166e-01, -2.5136e-01, -1.3726e-01, -4.9980e-01, - -7.9045e-01, -1.4746e+00, -8.4182e-01, -5.0295e-01, - -1.4258e+00, 3.4341e-01, 9.4227e-01, -1.5189e+00, - 
-8.6567e-01, -4.0081e-01, -3.1708e-01, -2.6742e-01, - -1.2195e+00, -5.2117e-01, 1.5928e+00, -3.6492e-01, - -9.9511e-01, -1.7899e-01, -1.9512e+00, -6.1893e-01, - -3.7459e-01, -3.1730e-01, -9.9960e-01, 4.7970e-01, - 9.6973e-01, -5.8714e-01, 6.6231e-01, 5.4592e-01, - -1.4168e-01, 1.6571e+00, -5.7778e-03, -1.0538e-01, - 7.2606e-01, -7.1253e-01, -9.9077e-02, 8.2629e-02, - 1.7111e+00, 1.6896e-03, 8.7316e-01, -2.3668e-01, - -3.1060e-01, 1.9099e+00, 5.9385e-01, 2.0343e+00, - 6.1842e-01, -1.0033e+00, -1.6711e+00, 2.0958e-01, - 1.0743e+00, 8.5540e-01, 1.9295e-01, -1.7138e-01, - 5.9866e-01, -9.9095e-01, -7.8713e-01, 5.9338e-01, - 1.2993e-01, -9.9809e-02, -5.7388e-01, 9.9489e-01, - 7.9334e-01, -8.8623e-01, -1.7928e-01, 9.7753e-01, - 1.3457e+00, -1.3771e+00, -2.9285e-01, -1.9107e+00, - 1.0635e+00, -1.6208e+00, -5.5537e-01, -5.2173e-01, - -2.5977e-01, -1.0624e-01, 2.1697e+00, -4.4851e-01, - -1.9869e+00, 3.0074e-01, -9.5688e-01, -2.5425e-01, - -1.0223e+00, -6.4030e-01, 8.0508e-01, -9.5571e-01, - -8.8600e-01, 8.2238e-02, -5.5926e-01, 4.1819e-02, - 1.2546e+00, -2.3587e+00, 5.2453e-01, 3.4421e-01, - 6.8416e-04, 3.3435e-01, 6.6938e-01, 1.1108e+00, - -2.2210e+00, -2.0454e-01, 1.1940e+00, 1.0849e+00, - -5.4913e-01, -3.0016e-01, -1.9306e+00, -5.7492e-01, - 8.8786e-01, 1.6483e+00, -4.2770e-01, 1.1728e+00, - -2.5338e-01, -7.6020e-01, 1.5390e+00, -4.4935e-01, - -1.8934e+00, -5.4604e-01, -6.4069e-01, -9.0203e-01, - 1.4465e-01, -2.1602e+00, -2.1651e-01, 1.9518e+00, - 2.2580e-01, -1.1688e+00, 7.5173e-01, 7.6988e-01, - -6.1221e-01, 6.5760e-01, -9.1026e-02, -1.4777e+00, - -2.2598e+00, 4.0915e-01, 1.1165e+00, 5.3990e-01, - 1.1370e+00, 8.6317e-01, 3.2800e-01, 6.5464e-01, - -4.5658e-01, -1.4765e+00, -1.7465e+00, 7.9917e-01, - -1.3987e+00, 1.0264e+00, 1.2331e+00, -1.7279e+00, - 9.9542e-01, 2.0683e+00, 1.2858e+00, 1.0251e+00, - 9.9242e-01, 2.2496e+00, 1.6605e-01, 1.3585e+00, - 1.5144e-01, -6.5066e-01, 1.4772e+00, 6.7857e-01, - 8.1605e-01, -7.1698e-01, -5.6946e-01, 3.5651e-01, - -2.8203e-01, 1.2853e-01, -4.0745e-01, 1.2820e+00, - -7.7646e-01, 2.2325e+00, 2.4540e-01, 9.9064e-01, - -6.6128e-01, 6.4041e-01, 1.7619e+00, 1.1263e+00, - 4.6979e-01, -5.1876e-01, 4.4834e-01, -1.1468e+00, - 6.1698e-01, -1.7917e-01, -8.0010e-01, 2.0869e-01, - -7.8554e-01, -5.5187e-01, -1.4288e+00, -1.2413e+00, - -8.0342e-01, -2.6986e-01, -1.7918e+00, 3.8550e-01, - -1.0102e+00, 1.0573e+00, -4.5470e-01, -4.6081e-01, - 2.0754e+00, -1.0446e+00, 8.4764e-01, -2.9433e-01, - -6.2001e-02, -9.8113e-01, -4.1164e-01, 8.0594e-01, - 1.3222e+00, -1.6033e-01, -4.3999e-02, -2.8184e-01, - -3.4838e-01, 1.6604e+00, 9.6669e-01, 4.0602e-01, - -1.0754e+00, -7.9273e-01, -1.8644e-01, 2.1003e+00, - -5.3542e-01, 1.0142e-01, -8.1503e-01, 9.2777e-01, - 2.1476e+00, 1.0898e+00, 3.4383e-01, -1.1546e+00, - -1.4931e+00, -6.1567e-01, -1.5962e-03, -3.8548e-01, - -1.1498e+00, 7.8691e-01, 1.8396e-01, -2.5873e-01, - -3.5983e-01, -3.1432e-01, 4.5913e-01, 1.2846e+00, - 1.1608e+00, -6.2020e-01, 5.3271e-01, -5.1930e-01, - -1.8613e-01, 1.3282e+00, -8.6994e-02, -3.8737e-01, - -1.4276e+00, -2.0487e-01, -1.1931e-01, 6.3543e-01, - -4.6946e-01, 1.4213e-01, -1.8783e-01, -1.4187e+00, - -8.5058e-01, 2.0297e-01, -7.6660e-01, -3.3614e-01, - -4.4619e-01, 1.4337e+00, -5.2976e-01, 6.4143e-01, - -1.2785e+00, -1.3595e+00, 7.2314e-01, 2.4877e-01, - -4.7373e-01, 6.6101e-01, 4.7286e-02, 8.5834e-01, - 1.6308e+00, 3.8030e-01, -6.3701e-01, 1.4711e+00, - 6.5158e-01, 3.2146e-01, 1.1041e+00, 1.2783e-01, - 3.7524e-01, 1.9136e+00, 1.5520e+00, 1.4770e+00, - -7.2528e-01, 1.6218e+00, 1.3913e+00, -4.6857e-01, - 1.8802e+00, 
-1.5092e+00, 1.2351e+00, 7.6921e-01, - -1.0053e+00, 6.8689e-01, -7.5955e-04, -2.0495e-01, - 5.9023e-01, -1.6810e+00, 2.7183e-01, -4.6520e-01, - 1.0066e+00, 8.5910e-01, 1.7604e+00, -1.9178e-01, - 2.4071e-01, 2.9389e-01, 3.6538e-01, -5.6589e-01, - 1.3161e+00, -3.2850e-01, 3.9506e-01, 6.7031e-01, - 3.6241e-01, -3.2368e-01, -7.4567e-01, -1.3325e+00, - -1.4737e+00, 1.2499e-01, 4.8335e-01, 1.0624e+00, - -6.2145e-02, -1.3911e-01, 5.3350e-02, 1.0612e+00, - 1.2390e+00, 6.6573e-01, -4.9155e-01, -6.2863e-01, - -8.2469e-01, -1.3683e-01, 6.3889e-01, 1.0005e+00, - -1.3996e-01, 2.9769e-01, 4.5611e-01, -9.2999e-01, - -1.8859e+00, 1.2055e+00, -6.2605e-01, 3.3277e-01, - -8.1502e-02, 1.1438e+00, 7.8002e-01, -1.5138e+00, - -2.3224e+00, -5.7821e-01, 1.4663e+00, 8.5344e-01, - -1.8175e+00, -3.3000e-01, -7.3008e-01, -1.5576e+00, - -1.4711e+00, 3.8730e-01, 6.9583e-01, -1.7366e+00, - -4.3335e-01, -6.7175e-02, -2.7834e-01, 4.9017e-02, - -1.5258e-01, -1.1023e+00, -5.4763e-01, 2.5046e+00, - 4.6364e-01, -6.2363e-02, 7.6653e-01, -2.2947e-01, - -9.0390e-01, -1.1563e+00, -8.5724e-01, -3.9168e-01, - -2.1033e-01, 1.8183e+00, -9.1074e-01, 3.7365e-01, - 4.3304e-01, 7.1439e-01, 5.7004e-01, -1.3989e+00, - 7.9014e-01, 1.3949e+00, -8.9148e-01, -3.4346e-01, - -1.3914e+00, 6.5261e-01, 2.9167e-01, 3.6922e-01, - -5.9333e-01, -2.7739e+00, 1.1133e+00, -1.2231e+00, - 5.4991e-01, 6.4870e-01, -1.4917e+00, 1.0496e+00, - 1.0591e+00, -5.5541e-01, -2.3619e-01, -9.3360e-01, - 6.1142e-01, 1.0655e+00, 6.0153e-02, -3.6276e-01, - -3.4025e-01, 1.0805e+00, -1.1760e+00, 6.7067e-01, - -8.2364e-02, -8.9822e-02, -1.6498e+00, -2.9596e+00, - 9.7391e-02, 2.0632e-01, -1.0856e+00, -1.0775e+00, - -1.3536e+00, -1.2602e+00, 5.3656e-01, -8.0887e-01, - 7.3527e-01, -1.7824e+00, -2.0984e+00, -1.0920e+00, - 1.2582e-01, -1.0993e+00, 1.0148e+00, -8.2411e-02, - 1.1943e+00, -3.4846e-01, -3.5307e-01, 7.5497e-02, - 2.2002e+00, -4.7438e-01, -5.3612e-01, 1.5412e-01, - -5.9839e-01, 9.2565e-01, 9.6331e-02, -7.9433e-01, - -3.3006e-01, 1.1340e-01, -8.4117e-01, -3.2478e-01, - 1.1603e+00, 1.8328e+00, 2.1091e-01, -7.8107e-01, - 4.2426e-01, -1.5542e-01, -1.0653e-01, -1.2663e+00, - -9.7847e-01, -3.8561e-01, -1.3099e+00, -9.3042e-01, - -1.5147e+00, -7.6837e-01, 6.2618e-01, 1.1598e+00, - -1.6879e+00, -4.5018e-01, 3.4336e-01, -5.6076e-01, - -1.3405e+00, -3.4903e-02, 2.3190e-01, 1.1081e+00, - 5.1637e-01, 1.0407e+00, 6.3189e-01, -1.3375e+00, - -2.2021e+00, -2.0489e+00, 7.1092e-01, -1.4466e+00, - 8.4133e-01, 2.5221e-01, 2.3478e-01, -2.1980e+00, - 1.3469e+00, 1.6152e-01, -3.0549e-01, 7.6936e-01, - 4.7731e-01, -7.0236e-01, -5.4445e-01, -2.9199e-01, - -1.6005e+00, -1.1530e-01, -6.7962e-01, -2.5965e+00, - 1.4621e+00, 2.1549e+00, -2.6996e-01, -1.0004e+00, - 2.3301e-01, -8.0532e-01, -4.2155e-01, -5.5519e-01, - 2.9395e+00, -4.4709e-01, 5.9063e-01, -6.0565e-01, - 1.5926e+00, -3.5547e-01, 1.1764e+00, 1.0966e+00, - 3.6933e-01, -1.9844e+00, -5.7384e-01, 2.7573e-01, - -5.1623e-01, 2.8516e-01, -8.6919e-01, -1.5053e+00, - -1.9598e+00, 8.4913e-01, -7.1647e-01, 5.2092e-01, - 6.3545e-01, 1.5906e-01, -1.6677e+00, -1.1006e-01, - 6.5367e-01, -1.8133e-01, -1.5784e+00, -5.6024e-01, - 4.6707e-01, 4.2867e-01, -4.7990e-01, 1.3229e+00, - 9.7518e-01, -5.6057e-01, 3.3221e-01, -4.9842e-01, - 4.4850e-01, 6.9743e-01, -9.7515e-01, 1.2833e+00, - -1.0105e+00, 6.7100e-01, -1.3963e-01, -3.8938e-01, - 1.3465e+00, -1.7468e-01, -9.3358e-01, -7.5945e-01, - -8.1102e-01, -3.4454e-01, 1.3065e+00, -4.7449e-01, - 4.9001e-01, 1.0820e+00, 1.7612e-02, 5.2664e-01, - -1.1578e+00, -4.8489e-01, 3.4374e-01, -3.8269e-01, - -2.6050e+00, 
2.1765e-01, 9.0948e-01, -9.8154e-01, - 7.4263e-01, -4.0510e-01, -8.0862e-01, 9.0853e-01, - 6.8509e-01, 2.7986e+00, -2.7111e+00, -1.4642e+00, - -4.9731e-01, -2.3791e-01, 1.7415e+00, -5.1687e-01, - 8.4695e-01, -3.5342e-01, -1.2315e+00, -9.6857e-01, - 6.2956e-01, 2.4499e-02, -5.1362e-01, -3.9584e-01, - -4.0065e-01, -1.3969e+00, -1.1367e+00, -7.6019e-01, - 3.2031e-01, 1.5243e-01, 4.2041e-01, -7.0057e-01, - 4.8590e-01, -1.2584e+00, 2.8135e-01, -1.8969e+00, - -1.7345e+00, 1.0226e+00, 9.2681e-02, -5.7239e-01, - -1.4450e-01, -4.9126e-01, 2.9241e-02, 1.2347e+00, - -6.9204e-02, -7.3272e-01, 3.7425e-01, 9.2322e-01, - -1.4598e+00, 2.4180e-01, -1.1695e+00, -1.0944e-01, - -4.8185e-01, -1.7684e+00, -5.0640e-01, 1.1126e+00, - -6.8184e-01, 1.5294e-01, 7.8498e-02, 2.2730e-01, - 1.5581e+00, -1.5763e+00, -1.0901e+00, -8.1852e-01, - -1.1738e-01, -4.6564e-01, 5.0686e-01, 1.6145e+00, - -1.6309e+00, -1.3573e+00, 7.2946e-01, -1.5365e-01, - -3.6824e-01, -1.9309e+00, -3.8211e-02, 3.3727e-03, - 6.8167e-01, -1.4194e-01, -3.5695e-01, -8.6402e-01, - -4.3686e-01, 7.0320e-01, 6.4675e-01, 2.0179e+00, - 7.5527e-01, -1.4920e+00, -1.5170e+00, 9.8222e-01, - -1.6156e+00, -9.2911e-02, 2.3418e-01, -9.6111e-01, - -1.6887e+00, 9.1975e-01, -5.2985e-01, 4.5644e-01, - 6.4104e-01, -4.1656e-01, 6.6246e-01, 2.2738e-01, - 2.0388e+00, 1.3300e+00, 1.7837e+00, 1.5238e+00, - 9.5890e-01, 3.8693e-01, 3.4254e-01, 1.3093e+00, - -7.9969e-01, -5.5454e-01, -1.3979e+00, 1.4255e+00, - 3.2702e-01, -5.8318e-01, -2.7239e-01, -5.3564e-01, - 9.5936e-01, -2.7681e-01, -4.0661e-02, 6.0723e-01, - 5.3524e-01, 2.6037e-01, -1.1927e+00, -3.0224e-02, - -3.8458e-01, 1.1226e+00, -3.5571e-01, 2.2183e-01, - 1.8577e+00, -3.5936e-01, 1.0863e+00, 8.4736e-01, - -4.8762e-01, -3.4355e-01, -6.8260e-01, 8.7783e-02, - 2.4012e-01, -4.8950e-01, -7.6359e-02, 8.9875e-01, - -1.2251e+00, 1.0629e-01, -4.5080e-01, 4.4906e-01, - -5.6769e-01, -8.1678e-01, 1.4544e-02, 9.6868e-02, - 3.0829e-01, 2.9944e-01, 4.2881e-02, -1.4693e+00, - 1.0233e+00, 2.1733e-01, -9.8022e-01, -4.2883e-01, - -5.4886e-01, 6.4325e-01, 3.2618e-01, 1.9940e+00, - 4.9949e-01, 5.1399e-01, -7.7528e-01, 7.2480e-02, - -8.1031e-01, 6.1383e-01, 4.8928e-01, 5.4020e-01, - 2.5247e-01, 1.1509e+00, -9.9766e-01, 1.5535e+00, - 5.2454e-01, -1.9794e+00, 2.5560e-01, 2.1126e-01, - 2.3100e-01, 1.2699e+00, 1.8299e+00, 7.8469e-01, - 4.1626e-01, -4.4572e-01, 1.3625e+00, -4.8276e-01, - -3.8035e-01, -1.5471e+00, -9.5400e-01, 3.3230e-01, - 1.0282e+00, 1.3163e+00, -6.1885e-01, 7.8040e-01, - 1.1954e+00, -2.0367e+00, 9.3689e-01, -1.4643e+00, - -2.1503e+00, 8.6546e-01, -1.2933e+00, -1.9580e+00, - 6.4062e-03, -1.1038e+00, 1.4813e+00, 3.1369e-01, - 4.2972e-01, 2.9188e-01, 4.9601e-01, -2.3210e-01, - -1.4342e+00, 1.3140e-01, 1.1390e+00, 1.0035e+00, - 7.8398e-02, 9.3424e-01, 1.7692e+00, -5.0649e-01, - -7.5531e-01, -4.9314e-01, 1.6721e+00, -9.9940e-01, - -3.4393e-01, 2.8409e+00, -8.2738e-01, -6.4175e-01, - -4.3816e-01, -6.1304e-01, -1.5490e-01, -4.4373e-01, - 3.9740e-01, -1.4586e+00, 7.4474e-01, 5.3479e-01, - 4.3232e-01, 1.2882e+00, 2.8760e-01, 1.2113e-01, - 9.3176e-01, 8.7078e-01, -1.9289e-01, -1.8236e+00, - -1.1373e+00, 1.4489e+00, 1.5214e-01, 9.9766e-01, - -3.8627e-01, 9.6097e-01, 7.4233e-01, -2.2032e+00, - -2.1896e+00, 2.5253e+00, -2.3891e-01, 3.1124e-01, - -1.0993e+00, -4.6040e-01, -1.5348e+00, 4.4286e-01, - -3.9695e-01, 3.7830e-01, -7.5658e-01, 1.0850e+00, - -9.1474e-01, -2.6790e-01, 1.3358e+00, -1.0221e+00, - 2.2408e+00, -1.2558e-01, 4.0623e-01, 6.0478e-01, - -1.3043e+00, -6.8057e-01, -6.0028e-01, 1.9285e+00, - 3.9517e-02, 2.7440e-01, 9.9702e-02, 
1.0522e-01, - -7.6941e-01, 2.9307e-01, 7.2625e-01, -1.5822e+00, - -8.8656e-03, -1.4193e+00, -9.9712e-01, 7.8685e-01, - -9.7000e-01, 2.0863e-01, 8.5271e-01, -1.7785e-01, - -8.4912e-01, 1.5973e+00, 1.9490e-01, 1.8397e+00, - -2.3108e-01, -9.8951e-01, 1.0640e+00, -1.3490e-01, - -2.7799e-01, -1.0887e-01, 1.7074e+00, 5.5751e-01, - -6.0756e-01, 1.2098e+00, 1.3412e+00, -5.7233e-01, - 1.2816e+00, 1.2262e-02, 5.4063e-01, 4.6592e-01, - -2.5016e+00, -9.9476e-01, 6.4856e-01, -8.4479e-02, - 8.9676e-02, 1.3805e+00, -1.5003e+00, -2.3972e-01, - -1.7953e+00, 2.7948e-01, 7.0458e-01, 8.8988e-02, - 3.7965e-01, 2.4350e-01, 9.6890e-01, 1.6177e-01, - 1.4253e+00, -1.3157e-01, -1.0490e+00, 4.3436e-01, - -5.9698e-01, 1.2268e+00, 6.8289e-01, 1.6553e+00, - 8.2571e-01, 1.1059e-01, -1.6574e+00, -1.4684e-01, - 2.3540e-01, 7.2439e-02, 7.7001e-01, 1.0229e+00, - 1.1218e+00, -2.8606e-01, 1.4772e+00, 3.6853e-01, - -7.6550e-01, -7.7720e-01, 1.6147e+00, 1.3509e+00, - -9.9500e-01, -4.9639e-01, -2.1524e-02, -8.0876e-01, - 4.2998e-01, -3.5938e-01, 1.2479e+00, 1.1463e+00, - 5.5722e-01, 4.4375e-01, 1.8282e+00, -9.1236e-01, - -2.5088e-01, 2.4748e-01, 2.0442e+00, 1.3857e+00, - -2.6958e-01, 6.9474e-01, -9.7977e-01, -7.3137e-01, - 3.8544e-01, 9.4488e-01, 5.0242e-02, 5.9173e-01, - -1.0692e+00, 6.8817e-01, -8.5924e-01, -5.3146e-01, - 1.8126e+00, -3.4514e-01, 1.5046e+00, -1.2570e+00, - -8.0226e-01, 5.0428e-01, 8.7169e-02, 1.3796e+00, - -1.3936e+00, 6.9121e-01, 1.0361e+00, 9.6047e-01, - -3.3124e-01, 5.8172e-01, 2.4301e+00, -3.0787e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8356, 0.5566, 0.3874, ..., 0.4735, 0.4173, 0.1842]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 11.203821897506714 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([2208, 3194, 8248, 3979, 1910, 9958, 4160, 5101, 1098, - 4689, 1389, 216, 9069, 9298, 185, 6867, 8825, 7295, - 248, 1982, 7519, 3151, 8106, 6490, 2709, 9743, 8136, - 7724, 1737, 8062, 4239, 6637, 9264, 8967, 1878, 4772, - 48, 8040, 9656, 4405, 9729, 9982, 2578, 4456, 4886, - 4774, 7108, 3443, 9402, 1294, 3094, 1454, 5497, 6773, - 2294, 1277, 4630, 9296, 8967, 4267, 7695, 3980, 1095, - 1221, 637, 3060, 3550, 9711, 6262, 1658, 1379, 4171, - 3084, 8609, 3496, 9519, 2372, 4850, 6099, 952, 2017, - 369, 1506, 5137, 9071, 8716, 5147, 2857, 7922, 3186, - 4695, 626, 8660, 9242, 2235, 1183, 8806, 932, 4385, - 3155, 4040, 7394, 624, 4475, 66, 9928, 4116, 3136, - 4742, 3197, 1343, 7371, 3392, 7171, 8058, 5365, 8129, - 2239, 7841, 8866, 6688, 2584, 5361, 6865, 4729, 7267, - 401, 7285, 1041, 9963, 2289, 9480, 4371, 3365, 2996, - 9948, 678, 4846, 5671, 8496, 4930, 8967, 5398, 7328, - 1157, 7297, 438, 8117, 1132, 7340, 2399, 9875, 6873, - 9915, 5662, 5933, 8864, 3423, 92, 2134, 8437, 4101, - 1420, 981, 6130, 9037, 1916, 9321, 8375, 4477, 9451, - 6272, 6752, 7213, 7364, 4384, 7944, 5471, 7834, 7570, - 7801, 9873, 6614, 9124, 7238, 2320, 3083, 5487, 9031, - 2127, 3154, 1113, 46, 8176, 1259, 6169, 6862, 2167, - 7492, 3296, 1369, 4598, 6361, 6252, 6681, 993, 2030, - 6995, 9891, 7557, 9719, 9648, 9095, 848, 320, 2282, - 7430, 8877, 5977, 535, 9089, 1066, 2895, 3887, 4816, - 1033, 7669, 549, 924, 8881, 2214, 6800, 4932, 56, - 3237, 7774, 1913, 3598, 2989, 6041, 8295, 9160, 1725, - 7971, 3212, 632, 9443, 2230, 1262, 6050, 2208, 6464, - 4919, 7751, 9812, 6889, 7381, 3729, 7186, 9396, 524, - 7639, 5642, 7733, 6997, 1074, 7017, 1313, 4325, 3205, - 879, 4606, 4689, 1779, 4208, 9113, 7675, 8760, 6771, - 3460, 9972, 4984, 7983, 7627, 2723, 5596, 4888, 7474, - 6154, 1959, 2273, 2394, 1280, 5429, 8333, 9579, 7404, - 1787, 7037, 7873, 4895, 9239, 8778, 4930, 7959, 8562, - 7379, 2295, 951, 188, 7230, 5725, 9582, 8405, 8091, - 4710, 89, 2949, 9864, 4741, 8126, 8559, 9819, 9527, - 7038, 6840, 8694, 8820, 2265, 9498, 2938, 3267, 3228, - 4991, 1683, 9595, 8278, 7615, 3457, 6839, 4010, 5664, - 4367, 8856, 4814, 9753, 38, 7270, 4811, 9984, 3648, - 9130, 3723, 1688, 5153, 3428, 5634, 2891, 5195, 9932, - 7360, 5054, 8991, 2581, 1892, 3516, 9457, 3319, 4945, - 2115, 3928, 9313, 2756, 4889, 6240, 9780, 1908, 5270, - 394, 4553, 2311, 1266, 9862, 5030, 4876, 2581, 4550, - 2190, 4429, 5809, 2268, 1610, 3905, 148, 6953, 924, - 8699, 9622, 9696, 3792, 5982, 40, 7060, 41, 9634, - 1292, 3305, 5649, 5763, 7350, 324, 1548, 2000, 2436, - 3866, 8918, 3037, 8728, 3725, 238, 8097, 151, 1414, - 5764, 9386, 6314, 9575, 6880, 2895, 8514, 2803, 1629, - 2746, 4827, 3247, 742, 658, 6224, 432, 8749, 5564, - 9242, 2793, 3426, 8446, 1503, 5804, 9863, 8350, 9314, - 8057, 9487, 7203, 394, 9341, 6125, 9084, 914, 8174, - 3692, 4067, 452, 5670, 2463, 9994, 5034, 4639, 4426, - 3028, 8923, 6235, 7044, 8944, 5047, 3643, 1068, 9271, - 429, 3446, 4374, 4501, 1404, 2721, 1836, 3923, 3380, - 1911, 4592, 6168, 2885, 8263, 1641, 3582, 5076, 2311, - 5577, 244, 939, 1882, 6202, 3802, 772, 6651, 5253, - 7301, 357, 6380, 5247, 7708, 6681, 4518, 2496, 4099, - 4066, 3418, 6699, 5400, 9234, 8546, 6682, 529, 9369, - 2987, 7509, 6754, 2055, 477, 4941, 7632, 8057, 8301, - 613, 6444, 5412, 4809, 4276, 8875, 2274, 2526, 8254, - 2388, 305, 702, 6597, 3621, 4288, 8379, 2926, 4511, - 8059, 4379, 6966, 1729, 8893, 3701, 5876, 9912, 6137, - 9395, 
6718, 12, 7188, 8892, 7285, 7886, 9741, 3208, - 2802, 1030, 62, 1095, 8623, 6072, 5284, 6464, 9687, - 1094, 535, 926, 7311, 8257, 1419, 167, 6074, 1931, - 4764, 7192, 9274, 7634, 2483, 6012, 3899, 5649, 9691, - 7452, 5506, 968, 3421, 6043, 1105, 6492, 6614, 7659, - 7369, 9932, 2401, 7707, 8472, 2177, 1220, 8621, 8570, - 9501, 7596, 231, 1903, 2848, 6692, 2362, 7807, 6347, - 4233, 4760, 6451, 4797, 5227, 4788, 8565, 6443, 3220, - 2838, 8496, 3346, 2759, 3169, 2689, 875, 3230, 7031, - 1848, 4346, 4930, 5156, 4848, 8104, 7338, 5234, 3697, - 310, 3377, 5946, 3672, 6047, 6909, 2990, 2807, 1086, - 4119, 2587, 7610, 1464, 5603, 249, 992, 9947, 7245, - 2199, 9486, 142, 9477, 5618, 1048, 402, 536, 8859, - 5992, 8496, 5394, 8118, 5429, 8950, 1563, 8214, 8794, - 5860, 9039, 313, 7718, 288, 5871, 1599, 8984, 7927, - 4119, 2843, 3593, 6356, 8538, 3994, 8591, 1186, 6555, - 1721, 8324, 4031, 253, 1749, 7806, 4804, 9764, 8893, - 3026, 5156, 5059, 7467, 7763, 7906, 3723, 3782, 7972, - 1420, 645, 5500, 6047, 9574, 1332, 9156, 1999, 5586, - 3109, 4760, 8008, 6780, 1642, 8528, 3595, 3025, 5519, - 7529, 4889, 1908, 9879, 9432, 5864, 7831, 3326, 1104, - 2169, 6553, 4204, 4348, 2019, 5991, 1695, 1177, 1073, - 859, 9148, 6821, 4526, 2493, 9206, 557, 5595, 1189, - 4508, 2706, 9501, 3588, 471, 381, 6819, 8084, 1817, - 8147, 2644, 8640, 4697, 1737, 8092, 5516, 6529, 3287, - 1645, 8887, 2460, 4183, 5417, 5111, 5017, 8695, 8024, - 5285, 6603, 985, 597, 8032, 4668, 6871, 3692, 2839, - 4231, 561, 8660, 5006, 9, 916, 1060, 8930, 4257, - 7579, 4817, 5474, 4207, 2757, 3510, 1941, 8636, 1179, - 2151, 8095, 3235, 766, 1260, 6339, 6277, 488, 2348, - 1210, 8048, 5679, 8626, 1268, 8069, 5943, 9788, 5634, - 1091, 3379, 3795, 1476, 1654, 6181, 6630, 3605, 7612, - 2990, 2897, 5067, 842, 878, 935, 309, 2204, 9485, - 5045, 5595, 5585, 1297, 5235, 9568, 4817, 6971, 5792, - 9259, 3608, 5416, 9064, 7565, 7555, 7624, 3935, 8727, - 74, 5618, 9130, 7627, 9097, 6915, 9562, 1267, 7390, - 1677, 80, 6217, 1332, 5889, 8151, 9367, 2660, 1184, - 4705, 8479, 7752, 540, 7355, 1929, 229, 1169, 1692, - 2305, 1898, 989, 124, 7008, 9895, 9600, 7029, 8002, - 5903, 9963, 3552, 209, 2599, 2861, 2481, 4248, 9754, - 5104, 6295, 6099, 6834, 4208, 9819, 1575, 1241, 3074, - 4555, 9011, 4580, 9323, 3594, 2345, 8720, 7479, 9072, - 1172, 4184, 1297, 429, 2488, 4842, 6597, 6755, 1670, - 5836, 2189, 2447, 8312, 4063, 7930, 1254, 9770, 7266, - 7271, 5888, 391, 403, 6522, 89, 5853, 2941, 5692, - 682, 6138, 8149, 9585, 3403, 9265, 4158, 5925, 8349, - 2805, 8441, 2969, 7641, 9466, 4587, 9433, 1576, 7770, - 1063, 5949, 5235, 3335, 9027, 2720, 4728, 4487, 1732, - 3427, 6100, 8523, 8786, 2976, 253, 1176, 8327, 4130, - 8220]), - values=tensor([-1.8869e+00, -1.0874e+00, -9.8337e-01, 4.5128e-01, - -1.3899e-01, -1.4085e+00, 1.8319e+00, -2.9824e-01, - -2.9007e-01, -1.0830e+00, 1.8296e+00, 7.0347e-01, - 8.0532e-01, -1.3704e+00, 1.0011e+00, -1.5591e+00, - -2.2775e-01, -1.7077e-01, 4.9351e-01, 1.6862e+00, - -3.5932e-01, 5.9849e-01, 6.8732e-02, 1.4060e+00, - 2.0382e+00, -5.8500e-01, -4.1688e-02, 8.3254e-01, - 5.0043e-01, -2.2479e+00, 9.0466e-01, 9.0263e-02, - 5.5254e-01, 1.3914e+00, 9.7754e-01, -1.0533e+00, - 1.5358e+00, -1.2251e+00, 3.2124e-01, -4.0089e-01, - 5.1726e-01, 3.6379e-01, -8.9350e-01, -6.8440e-01, - 9.8756e-01, -1.7394e+00, -4.8734e-01, -7.1909e-01, - 1.8058e+00, 6.8939e-01, -4.5703e-01, 3.6393e-01, - 5.7166e-01, -2.5136e-01, -1.3726e-01, -4.9980e-01, - -7.9045e-01, -1.4746e+00, -8.4182e-01, -5.0295e-01, - -1.4258e+00, 3.4341e-01, 9.4227e-01, -1.5189e+00, - 
-8.6567e-01, -4.0081e-01, -3.1708e-01, -2.6742e-01, - -1.2195e+00, -5.2117e-01, 1.5928e+00, -3.6492e-01, - -9.9511e-01, -1.7899e-01, -1.9512e+00, -6.1893e-01, - -3.7459e-01, -3.1730e-01, -9.9960e-01, 4.7970e-01, - 9.6973e-01, -5.8714e-01, 6.6231e-01, 5.4592e-01, - -1.4168e-01, 1.6571e+00, -5.7778e-03, -1.0538e-01, - 7.2606e-01, -7.1253e-01, -9.9077e-02, 8.2629e-02, - 1.7111e+00, 1.6896e-03, 8.7316e-01, -2.3668e-01, - -3.1060e-01, 1.9099e+00, 5.9385e-01, 2.0343e+00, - 6.1842e-01, -1.0033e+00, -1.6711e+00, 2.0958e-01, - 1.0743e+00, 8.5540e-01, 1.9295e-01, -1.7138e-01, - 5.9866e-01, -9.9095e-01, -7.8713e-01, 5.9338e-01, - 1.2993e-01, -9.9809e-02, -5.7388e-01, 9.9489e-01, - 7.9334e-01, -8.8623e-01, -1.7928e-01, 9.7753e-01, - 1.3457e+00, -1.3771e+00, -2.9285e-01, -1.9107e+00, - 1.0635e+00, -1.6208e+00, -5.5537e-01, -5.2173e-01, - -2.5977e-01, -1.0624e-01, 2.1697e+00, -4.4851e-01, - -1.9869e+00, 3.0074e-01, -9.5688e-01, -2.5425e-01, - -1.0223e+00, -6.4030e-01, 8.0508e-01, -9.5571e-01, - -8.8600e-01, 8.2238e-02, -5.5926e-01, 4.1819e-02, - 1.2546e+00, -2.3587e+00, 5.2453e-01, 3.4421e-01, - 6.8416e-04, 3.3435e-01, 6.6938e-01, 1.1108e+00, - -2.2210e+00, -2.0454e-01, 1.1940e+00, 1.0849e+00, - -5.4913e-01, -3.0016e-01, -1.9306e+00, -5.7492e-01, - 8.8786e-01, 1.6483e+00, -4.2770e-01, 1.1728e+00, - -2.5338e-01, -7.6020e-01, 1.5390e+00, -4.4935e-01, - -1.8934e+00, -5.4604e-01, -6.4069e-01, -9.0203e-01, - 1.4465e-01, -2.1602e+00, -2.1651e-01, 1.9518e+00, - 2.2580e-01, -1.1688e+00, 7.5173e-01, 7.6988e-01, - -6.1221e-01, 6.5760e-01, -9.1026e-02, -1.4777e+00, - -2.2598e+00, 4.0915e-01, 1.1165e+00, 5.3990e-01, - 1.1370e+00, 8.6317e-01, 3.2800e-01, 6.5464e-01, - -4.5658e-01, -1.4765e+00, -1.7465e+00, 7.9917e-01, - -1.3987e+00, 1.0264e+00, 1.2331e+00, -1.7279e+00, - 9.9542e-01, 2.0683e+00, 1.2858e+00, 1.0251e+00, - 9.9242e-01, 2.2496e+00, 1.6605e-01, 1.3585e+00, - 1.5144e-01, -6.5066e-01, 1.4772e+00, 6.7857e-01, - 8.1605e-01, -7.1698e-01, -5.6946e-01, 3.5651e-01, - -2.8203e-01, 1.2853e-01, -4.0745e-01, 1.2820e+00, - -7.7646e-01, 2.2325e+00, 2.4540e-01, 9.9064e-01, - -6.6128e-01, 6.4041e-01, 1.7619e+00, 1.1263e+00, - 4.6979e-01, -5.1876e-01, 4.4834e-01, -1.1468e+00, - 6.1698e-01, -1.7917e-01, -8.0010e-01, 2.0869e-01, - -7.8554e-01, -5.5187e-01, -1.4288e+00, -1.2413e+00, - -8.0342e-01, -2.6986e-01, -1.7918e+00, 3.8550e-01, - -1.0102e+00, 1.0573e+00, -4.5470e-01, -4.6081e-01, - 2.0754e+00, -1.0446e+00, 8.4764e-01, -2.9433e-01, - -6.2001e-02, -9.8113e-01, -4.1164e-01, 8.0594e-01, - 1.3222e+00, -1.6033e-01, -4.3999e-02, -2.8184e-01, - -3.4838e-01, 1.6604e+00, 9.6669e-01, 4.0602e-01, - -1.0754e+00, -7.9273e-01, -1.8644e-01, 2.1003e+00, - -5.3542e-01, 1.0142e-01, -8.1503e-01, 9.2777e-01, - 2.1476e+00, 1.0898e+00, 3.4383e-01, -1.1546e+00, - -1.4931e+00, -6.1567e-01, -1.5962e-03, -3.8548e-01, - -1.1498e+00, 7.8691e-01, 1.8396e-01, -2.5873e-01, - -3.5983e-01, -3.1432e-01, 4.5913e-01, 1.2846e+00, - 1.1608e+00, -6.2020e-01, 5.3271e-01, -5.1930e-01, - -1.8613e-01, 1.3282e+00, -8.6994e-02, -3.8737e-01, - -1.4276e+00, -2.0487e-01, -1.1931e-01, 6.3543e-01, - -4.6946e-01, 1.4213e-01, -1.8783e-01, -1.4187e+00, - -8.5058e-01, 2.0297e-01, -7.6660e-01, -3.3614e-01, - -4.4619e-01, 1.4337e+00, -5.2976e-01, 6.4143e-01, - -1.2785e+00, -1.3595e+00, 7.2314e-01, 2.4877e-01, - -4.7373e-01, 6.6101e-01, 4.7286e-02, 8.5834e-01, - 1.6308e+00, 3.8030e-01, -6.3701e-01, 1.4711e+00, - 6.5158e-01, 3.2146e-01, 1.1041e+00, 1.2783e-01, - 3.7524e-01, 1.9136e+00, 1.5520e+00, 1.4770e+00, - -7.2528e-01, 1.6218e+00, 1.3913e+00, -4.6857e-01, - 1.8802e+00, 
-1.5092e+00, 1.2351e+00, 7.6921e-01, - -1.0053e+00, 6.8689e-01, -7.5955e-04, -2.0495e-01, - 5.9023e-01, -1.6810e+00, 2.7183e-01, -4.6520e-01, - 1.0066e+00, 8.5910e-01, 1.7604e+00, -1.9178e-01, - 2.4071e-01, 2.9389e-01, 3.6538e-01, -5.6589e-01, - 1.3161e+00, -3.2850e-01, 3.9506e-01, 6.7031e-01, - 3.6241e-01, -3.2368e-01, -7.4567e-01, -1.3325e+00, - -1.4737e+00, 1.2499e-01, 4.8335e-01, 1.0624e+00, - -6.2145e-02, -1.3911e-01, 5.3350e-02, 1.0612e+00, - 1.2390e+00, 6.6573e-01, -4.9155e-01, -6.2863e-01, - -8.2469e-01, -1.3683e-01, 6.3889e-01, 1.0005e+00, - -1.3996e-01, 2.9769e-01, 4.5611e-01, -9.2999e-01, - -1.8859e+00, 1.2055e+00, -6.2605e-01, 3.3277e-01, - -8.1502e-02, 1.1438e+00, 7.8002e-01, -1.5138e+00, - -2.3224e+00, -5.7821e-01, 1.4663e+00, 8.5344e-01, - -1.8175e+00, -3.3000e-01, -7.3008e-01, -1.5576e+00, - -1.4711e+00, 3.8730e-01, 6.9583e-01, -1.7366e+00, - -4.3335e-01, -6.7175e-02, -2.7834e-01, 4.9017e-02, - -1.5258e-01, -1.1023e+00, -5.4763e-01, 2.5046e+00, - 4.6364e-01, -6.2363e-02, 7.6653e-01, -2.2947e-01, - -9.0390e-01, -1.1563e+00, -8.5724e-01, -3.9168e-01, - -2.1033e-01, 1.8183e+00, -9.1074e-01, 3.7365e-01, - 4.3304e-01, 7.1439e-01, 5.7004e-01, -1.3989e+00, - 7.9014e-01, 1.3949e+00, -8.9148e-01, -3.4346e-01, - -1.3914e+00, 6.5261e-01, 2.9167e-01, 3.6922e-01, - -5.9333e-01, -2.7739e+00, 1.1133e+00, -1.2231e+00, - 5.4991e-01, 6.4870e-01, -1.4917e+00, 1.0496e+00, - 1.0591e+00, -5.5541e-01, -2.3619e-01, -9.3360e-01, - 6.1142e-01, 1.0655e+00, 6.0153e-02, -3.6276e-01, - -3.4025e-01, 1.0805e+00, -1.1760e+00, 6.7067e-01, - -8.2364e-02, -8.9822e-02, -1.6498e+00, -2.9596e+00, - 9.7391e-02, 2.0632e-01, -1.0856e+00, -1.0775e+00, - -1.3536e+00, -1.2602e+00, 5.3656e-01, -8.0887e-01, - 7.3527e-01, -1.7824e+00, -2.0984e+00, -1.0920e+00, - 1.2582e-01, -1.0993e+00, 1.0148e+00, -8.2411e-02, - 1.1943e+00, -3.4846e-01, -3.5307e-01, 7.5497e-02, - 2.2002e+00, -4.7438e-01, -5.3612e-01, 1.5412e-01, - -5.9839e-01, 9.2565e-01, 9.6331e-02, -7.9433e-01, - -3.3006e-01, 1.1340e-01, -8.4117e-01, -3.2478e-01, - 1.1603e+00, 1.8328e+00, 2.1091e-01, -7.8107e-01, - 4.2426e-01, -1.5542e-01, -1.0653e-01, -1.2663e+00, - -9.7847e-01, -3.8561e-01, -1.3099e+00, -9.3042e-01, - -1.5147e+00, -7.6837e-01, 6.2618e-01, 1.1598e+00, - -1.6879e+00, -4.5018e-01, 3.4336e-01, -5.6076e-01, - -1.3405e+00, -3.4903e-02, 2.3190e-01, 1.1081e+00, - 5.1637e-01, 1.0407e+00, 6.3189e-01, -1.3375e+00, - -2.2021e+00, -2.0489e+00, 7.1092e-01, -1.4466e+00, - 8.4133e-01, 2.5221e-01, 2.3478e-01, -2.1980e+00, - 1.3469e+00, 1.6152e-01, -3.0549e-01, 7.6936e-01, - 4.7731e-01, -7.0236e-01, -5.4445e-01, -2.9199e-01, - -1.6005e+00, -1.1530e-01, -6.7962e-01, -2.5965e+00, - 1.4621e+00, 2.1549e+00, -2.6996e-01, -1.0004e+00, - 2.3301e-01, -8.0532e-01, -4.2155e-01, -5.5519e-01, - 2.9395e+00, -4.4709e-01, 5.9063e-01, -6.0565e-01, - 1.5926e+00, -3.5547e-01, 1.1764e+00, 1.0966e+00, - 3.6933e-01, -1.9844e+00, -5.7384e-01, 2.7573e-01, - -5.1623e-01, 2.8516e-01, -8.6919e-01, -1.5053e+00, - -1.9598e+00, 8.4913e-01, -7.1647e-01, 5.2092e-01, - 6.3545e-01, 1.5906e-01, -1.6677e+00, -1.1006e-01, - 6.5367e-01, -1.8133e-01, -1.5784e+00, -5.6024e-01, - 4.6707e-01, 4.2867e-01, -4.7990e-01, 1.3229e+00, - 9.7518e-01, -5.6057e-01, 3.3221e-01, -4.9842e-01, - 4.4850e-01, 6.9743e-01, -9.7515e-01, 1.2833e+00, - -1.0105e+00, 6.7100e-01, -1.3963e-01, -3.8938e-01, - 1.3465e+00, -1.7468e-01, -9.3358e-01, -7.5945e-01, - -8.1102e-01, -3.4454e-01, 1.3065e+00, -4.7449e-01, - 4.9001e-01, 1.0820e+00, 1.7612e-02, 5.2664e-01, - -1.1578e+00, -4.8489e-01, 3.4374e-01, -3.8269e-01, - -2.6050e+00, 
2.1765e-01, 9.0948e-01, -9.8154e-01, - 7.4263e-01, -4.0510e-01, -8.0862e-01, 9.0853e-01, - 6.8509e-01, 2.7986e+00, -2.7111e+00, -1.4642e+00, - -4.9731e-01, -2.3791e-01, 1.7415e+00, -5.1687e-01, - 8.4695e-01, -3.5342e-01, -1.2315e+00, -9.6857e-01, - 6.2956e-01, 2.4499e-02, -5.1362e-01, -3.9584e-01, - -4.0065e-01, -1.3969e+00, -1.1367e+00, -7.6019e-01, - 3.2031e-01, 1.5243e-01, 4.2041e-01, -7.0057e-01, - 4.8590e-01, -1.2584e+00, 2.8135e-01, -1.8969e+00, - -1.7345e+00, 1.0226e+00, 9.2681e-02, -5.7239e-01, - -1.4450e-01, -4.9126e-01, 2.9241e-02, 1.2347e+00, - -6.9204e-02, -7.3272e-01, 3.7425e-01, 9.2322e-01, - -1.4598e+00, 2.4180e-01, -1.1695e+00, -1.0944e-01, - -4.8185e-01, -1.7684e+00, -5.0640e-01, 1.1126e+00, - -6.8184e-01, 1.5294e-01, 7.8498e-02, 2.2730e-01, - 1.5581e+00, -1.5763e+00, -1.0901e+00, -8.1852e-01, - -1.1738e-01, -4.6564e-01, 5.0686e-01, 1.6145e+00, - -1.6309e+00, -1.3573e+00, 7.2946e-01, -1.5365e-01, - -3.6824e-01, -1.9309e+00, -3.8211e-02, 3.3727e-03, - 6.8167e-01, -1.4194e-01, -3.5695e-01, -8.6402e-01, - -4.3686e-01, 7.0320e-01, 6.4675e-01, 2.0179e+00, - 7.5527e-01, -1.4920e+00, -1.5170e+00, 9.8222e-01, - -1.6156e+00, -9.2911e-02, 2.3418e-01, -9.6111e-01, - -1.6887e+00, 9.1975e-01, -5.2985e-01, 4.5644e-01, - 6.4104e-01, -4.1656e-01, 6.6246e-01, 2.2738e-01, - 2.0388e+00, 1.3300e+00, 1.7837e+00, 1.5238e+00, - 9.5890e-01, 3.8693e-01, 3.4254e-01, 1.3093e+00, - -7.9969e-01, -5.5454e-01, -1.3979e+00, 1.4255e+00, - 3.2702e-01, -5.8318e-01, -2.7239e-01, -5.3564e-01, - 9.5936e-01, -2.7681e-01, -4.0661e-02, 6.0723e-01, - 5.3524e-01, 2.6037e-01, -1.1927e+00, -3.0224e-02, - -3.8458e-01, 1.1226e+00, -3.5571e-01, 2.2183e-01, - 1.8577e+00, -3.5936e-01, 1.0863e+00, 8.4736e-01, - -4.8762e-01, -3.4355e-01, -6.8260e-01, 8.7783e-02, - 2.4012e-01, -4.8950e-01, -7.6359e-02, 8.9875e-01, - -1.2251e+00, 1.0629e-01, -4.5080e-01, 4.4906e-01, - -5.6769e-01, -8.1678e-01, 1.4544e-02, 9.6868e-02, - 3.0829e-01, 2.9944e-01, 4.2881e-02, -1.4693e+00, - 1.0233e+00, 2.1733e-01, -9.8022e-01, -4.2883e-01, - -5.4886e-01, 6.4325e-01, 3.2618e-01, 1.9940e+00, - 4.9949e-01, 5.1399e-01, -7.7528e-01, 7.2480e-02, - -8.1031e-01, 6.1383e-01, 4.8928e-01, 5.4020e-01, - 2.5247e-01, 1.1509e+00, -9.9766e-01, 1.5535e+00, - 5.2454e-01, -1.9794e+00, 2.5560e-01, 2.1126e-01, - 2.3100e-01, 1.2699e+00, 1.8299e+00, 7.8469e-01, - 4.1626e-01, -4.4572e-01, 1.3625e+00, -4.8276e-01, - -3.8035e-01, -1.5471e+00, -9.5400e-01, 3.3230e-01, - 1.0282e+00, 1.3163e+00, -6.1885e-01, 7.8040e-01, - 1.1954e+00, -2.0367e+00, 9.3689e-01, -1.4643e+00, - -2.1503e+00, 8.6546e-01, -1.2933e+00, -1.9580e+00, - 6.4062e-03, -1.1038e+00, 1.4813e+00, 3.1369e-01, - 4.2972e-01, 2.9188e-01, 4.9601e-01, -2.3210e-01, - -1.4342e+00, 1.3140e-01, 1.1390e+00, 1.0035e+00, - 7.8398e-02, 9.3424e-01, 1.7692e+00, -5.0649e-01, - -7.5531e-01, -4.9314e-01, 1.6721e+00, -9.9940e-01, - -3.4393e-01, 2.8409e+00, -8.2738e-01, -6.4175e-01, - -4.3816e-01, -6.1304e-01, -1.5490e-01, -4.4373e-01, - 3.9740e-01, -1.4586e+00, 7.4474e-01, 5.3479e-01, - 4.3232e-01, 1.2882e+00, 2.8760e-01, 1.2113e-01, - 9.3176e-01, 8.7078e-01, -1.9289e-01, -1.8236e+00, - -1.1373e+00, 1.4489e+00, 1.5214e-01, 9.9766e-01, - -3.8627e-01, 9.6097e-01, 7.4233e-01, -2.2032e+00, - -2.1896e+00, 2.5253e+00, -2.3891e-01, 3.1124e-01, - -1.0993e+00, -4.6040e-01, -1.5348e+00, 4.4286e-01, - -3.9695e-01, 3.7830e-01, -7.5658e-01, 1.0850e+00, - -9.1474e-01, -2.6790e-01, 1.3358e+00, -1.0221e+00, - 2.2408e+00, -1.2558e-01, 4.0623e-01, 6.0478e-01, - -1.3043e+00, -6.8057e-01, -6.0028e-01, 1.9285e+00, - 3.9517e-02, 2.7440e-01, 9.9702e-02, 
1.0522e-01, - -7.6941e-01, 2.9307e-01, 7.2625e-01, -1.5822e+00, - -8.8656e-03, -1.4193e+00, -9.9712e-01, 7.8685e-01, - -9.7000e-01, 2.0863e-01, 8.5271e-01, -1.7785e-01, - -8.4912e-01, 1.5973e+00, 1.9490e-01, 1.8397e+00, - -2.3108e-01, -9.8951e-01, 1.0640e+00, -1.3490e-01, - -2.7799e-01, -1.0887e-01, 1.7074e+00, 5.5751e-01, - -6.0756e-01, 1.2098e+00, 1.3412e+00, -5.7233e-01, - 1.2816e+00, 1.2262e-02, 5.4063e-01, 4.6592e-01, - -2.5016e+00, -9.9476e-01, 6.4856e-01, -8.4479e-02, - 8.9676e-02, 1.3805e+00, -1.5003e+00, -2.3972e-01, - -1.7953e+00, 2.7948e-01, 7.0458e-01, 8.8988e-02, - 3.7965e-01, 2.4350e-01, 9.6890e-01, 1.6177e-01, - 1.4253e+00, -1.3157e-01, -1.0490e+00, 4.3436e-01, - -5.9698e-01, 1.2268e+00, 6.8289e-01, 1.6553e+00, - 8.2571e-01, 1.1059e-01, -1.6574e+00, -1.4684e-01, - 2.3540e-01, 7.2439e-02, 7.7001e-01, 1.0229e+00, - 1.1218e+00, -2.8606e-01, 1.4772e+00, 3.6853e-01, - -7.6550e-01, -7.7720e-01, 1.6147e+00, 1.3509e+00, - -9.9500e-01, -4.9639e-01, -2.1524e-02, -8.0876e-01, - 4.2998e-01, -3.5938e-01, 1.2479e+00, 1.1463e+00, - 5.5722e-01, 4.4375e-01, 1.8282e+00, -9.1236e-01, - -2.5088e-01, 2.4748e-01, 2.0442e+00, 1.3857e+00, - -2.6958e-01, 6.9474e-01, -9.7977e-01, -7.3137e-01, - 3.8544e-01, 9.4488e-01, 5.0242e-02, 5.9173e-01, - -1.0692e+00, 6.8817e-01, -8.5924e-01, -5.3146e-01, - 1.8126e+00, -3.4514e-01, 1.5046e+00, -1.2570e+00, - -8.0226e-01, 5.0428e-01, 8.7169e-02, 1.3796e+00, - -1.3936e+00, 6.9121e-01, 1.0361e+00, 9.6047e-01, - -3.3124e-01, 5.8172e-01, 2.4301e+00, -3.0787e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8356, 0.5566, 0.3874, ..., 0.4735, 0.4173, 0.1842]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 11.203821897506714 seconds - -[39.12, 38.55, 38.44, 39.03, 38.81, 38.42, 38.42, 38.33, 38.47, 38.46] -[65.11] -10.94342041015625 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 687353, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.203821897506714, 'TIME_S_1KI': 0.016299953440963688, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.5261029052734, 'W': 65.11} -[39.12, 38.55, 38.44, 39.03, 38.81, 38.42, 38.42, 38.33, 38.47, 38.46, 39.52, 38.41, 38.44, 38.35, 38.45, 38.48, 38.4, 44.02, 38.83, 38.96] -699.88 -34.994 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 687353, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.203821897506714, 'TIME_S_1KI': 0.016299953440963688, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.5261029052734, 'W': 65.11, 'J_1KI': 1.036623253125066, 'W_1KI': 0.09472570862424402, 'W_D': 30.116, 'J_D': 329.5720490722656, 'W_D_1KI': 0.04381445923710233, 'J_D_1KI': 6.374375209987056e-05} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_2e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_2e-05.json deleted file mode 100644 index cf9ec67..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 602748, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, 
"MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.21125841140747, "TIME_S_1KI": 0.01694117344463602, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 683.3000339078902, "W": 65.71, "J_1KI": 1.1336413126346172, "W_1KI": 0.10901736712523309, "W_D": 30.58899999999999, "J_D": 318.0865125126838, "W_D_1KI": 0.05074923516958993, "J_D_1KI": 8.41964389257035e-05} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_2e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_2e-05.output deleted file mode 100644 index 19d9ed0..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_2e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 0.02713489532470703} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2000, 2000, 2000]), - col_indices=tensor([2645, 76, 1809, ..., 1614, 2006, 9458]), - values=tensor([ 1.3874, -1.1677, 0.9784, ..., -0.1842, 0.3648, - -1.9952]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.1031, 0.9681, 0.6651, ..., 0.3559, 0.0936, 0.1162]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 0.02713489532470703 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '386955', '-ss', '10000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 6.74083685874939} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 2000, 2000, 2000]), - col_indices=tensor([3749, 8011, 2966, ..., 9889, 3092, 195]), - values=tensor([-0.5969, -1.4126, 0.8624, ..., -0.7538, 0.1983, - 1.5978]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.3440, 0.9117, 0.4915, ..., 0.1931, 0.2897, 0.9406]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 6.74083685874939 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '602748', '-ss', '10000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.21125841140747} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([1802, 2893, 8322, ..., 2363, 3238, 4274]), - values=tensor([ 0.0300, 1.2086, -0.2481, ..., -0.0804, 2.1056, - 0.1582]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.1979, 0.3419, 0.8009, ..., 0.6035, 0.7629, 0.5268]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.21125841140747 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([1802, 2893, 8322, ..., 2363, 3238, 4274]), - values=tensor([ 0.0300, 1.2086, -0.2481, ..., -0.0804, 2.1056, - 0.1582]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.1979, 0.3419, 0.8009, ..., 0.6035, 0.7629, 0.5268]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.21125841140747 seconds - -[41.3, 38.81, 38.44, 38.39, 38.33, 38.28, 38.37, 38.72, 38.46, 38.78] -[65.71] -10.398722171783447 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 602748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 2000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.21125841140747, 'TIME_S_1KI': 0.01694117344463602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 683.3000339078902, 'W': 65.71} -[41.3, 38.81, 38.44, 38.39, 38.33, 38.28, 38.37, 38.72, 38.46, 38.78, 39.88, 38.49, 38.53, 38.42, 38.35, 38.28, 39.06, 44.36, 40.0, 38.3] -702.4200000000001 -35.121 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 602748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 2000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.21125841140747, 'TIME_S_1KI': 0.01694117344463602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 683.3000339078902, 'W': 65.71, 'J_1KI': 1.1336413126346172, 'W_1KI': 0.10901736712523309, 'W_D': 30.58899999999999, 'J_D': 318.0865125126838, 'W_D_1KI': 0.05074923516958993, 'J_D_1KI': 8.41964389257035e-05} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_5e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_5e-05.json deleted file mode 100644 index d929a37..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 475418, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.451735258102417, "TIME_S_1KI": 0.021984306984805826, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 688.6502168655395, "W": 65.24, "J_1KI": 1.4485152368348264, "W_1KI": 0.1372266090051281, "W_D": 30.341749999999998, "J_D": 320.27671240925787, "W_D_1KI": 0.06382120575998383, "J_D_1KI": 0.00013424229995495299} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_5e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_5e-05.output deleted file mode 100644 index 66d61b6..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_5e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.032665252685546875} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4998, 5000]), - col_indices=tensor([2543, 4228, 5675, ..., 7099, 979, 1021]), - values=tensor([ 2.5612, -1.4114, -1.2194, ..., -1.6806, 0.1446, - -0.8334]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.1235, 0.4410, 0.8098, ..., 0.0872, 0.6747, 0.3389]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 0.032665252685546875 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '321442', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.099309921264648} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 4999, 4999, 5000]), - col_indices=tensor([ 689, 2907, 3020, ..., 8328, 764, 7546]), - values=tensor([ 1.2282, 0.2524, -0.1503, ..., -1.6702, 1.0701, - -0.5727]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.2869, 0.4983, 0.2994, ..., 0.7250, 0.9680, 0.2854]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 7.099309921264648 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '475418', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.451735258102417} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 5000, 5000, 5000]), - col_indices=tensor([1429, 1621, 2379, ..., 8177, 7655, 3539]), - values=tensor([ 1.6185, -0.3081, 0.3132, ..., 0.9048, 0.9246, - 0.1203]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.9646, 0.4747, 0.7415, ..., 0.6425, 0.1934, 0.4010]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.451735258102417 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 5000, 5000, 5000]), - col_indices=tensor([1429, 1621, 2379, ..., 8177, 7655, 3539]), - values=tensor([ 1.6185, -0.3081, 0.3132, ..., 0.9048, 0.9246, - 0.1203]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.9646, 0.4747, 0.7415, ..., 0.6425, 0.1934, 0.4010]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.451735258102417 seconds - -[39.03, 39.16, 38.53, 39.16, 38.96, 38.83, 39.53, 38.34, 38.57, 38.38] -[65.24] -10.555644035339355 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 475418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.451735258102417, 'TIME_S_1KI': 0.021984306984805826, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 688.6502168655395, 'W': 65.24} -[39.03, 39.16, 38.53, 39.16, 38.96, 38.83, 39.53, 38.34, 38.57, 38.38, 39.85, 38.31, 39.06, 39.04, 38.89, 38.51, 38.44, 38.39, 38.43, 38.37] -697.9649999999999 -34.89825 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 475418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.451735258102417, 'TIME_S_1KI': 0.021984306984805826, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 688.6502168655395, 'W': 65.24, 'J_1KI': 1.4485152368348264, 'W_1KI': 0.1372266090051281, 'W_D': 30.341749999999998, 'J_D': 320.27671240925787, 'W_D_1KI': 0.06382120575998383, 'J_D_1KI': 0.00013424229995495299} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_8e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_8e-05.json deleted file mode 100644 index 4899a00..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 399296, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 7999, "MATRIX_DENSITY": 7.999e-05, "TIME_S": 10.458914041519165, "TIME_S_1KI": 0.026193385462211404, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.009458374977, "W": 65.55, "J_1KI": 1.6854901085284526, "W_1KI": 0.16416392851418496, "W_D": 30.665499999999994, "J_D": 314.8462478382587, "W_D_1KI": 0.07679891609232248, "J_D_1KI": 0.00019233580124099032} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_8e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_8e-05.output deleted file mode 100644 index 7469eca..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_10000_8e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 
10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 0.03641247749328613} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 7998, 7999, 8000]), - col_indices=tensor([6117, 8477, 6510, ..., 3404, 5465, 8467]), - values=tensor([-1.6081, -0.0719, -2.1307, ..., 0.1441, 0.1036, - 0.0623]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.8595, 0.7644, 0.3531, ..., 0.0315, 0.6130, 0.2786]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 0.03641247749328613 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '288362', '-ss', '10000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 7999, "MATRIX_DENSITY": 7.999e-05, "TIME_S": 7.582841157913208} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7998, 7998, 7999]), - col_indices=tensor([7743, 8729, 4527, ..., 5020, 3758, 9585]), - values=tensor([-0.7905, 0.7067, -0.3667, ..., -1.9197, 0.6727, - -0.2685]), size=(10000, 10000), nnz=7999, - layout=torch.sparse_csr) -tensor([0.2965, 0.4690, 0.6034, ..., 0.9291, 0.5376, 0.8914]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 7999 -Density: 7.999e-05 -Time: 7.582841157913208 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '399296', '-ss', '10000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 7999, "MATRIX_DENSITY": 7.999e-05, "TIME_S": 10.458914041519165} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7998, 7998, 7999]), - col_indices=tensor([4248, 4898, 9130, ..., 2629, 4508, 4391]), - values=tensor([-0.1804, -0.2691, 0.3496, ..., -0.6907, 1.8081, - -1.1816]), size=(10000, 10000), nnz=7999, - layout=torch.sparse_csr) -tensor([0.3531, 0.0473, 0.4264, ..., 0.6320, 0.2793, 0.8248]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 7999 -Density: 7.999e-05 -Time: 10.458914041519165 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7998, 7998, 7999]), - col_indices=tensor([4248, 4898, 9130, ..., 2629, 4508, 4391]), - values=tensor([-0.1804, -0.2691, 0.3496, ..., -0.6907, 1.8081, - -1.1816]), size=(10000, 10000), nnz=7999, - layout=torch.sparse_csr) -tensor([0.3531, 0.0473, 0.4264, ..., 0.6320, 0.2793, 0.8248]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 7999 -Density: 7.999e-05 -Time: 10.458914041519165 seconds - -[39.14, 38.74, 38.55, 38.47, 38.62, 38.75, 38.59, 38.85, 38.96, 38.38] -[65.55] -10.267116069793701 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 399296, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 7999, 'MATRIX_DENSITY': 7.999e-05, 'TIME_S': 10.458914041519165, 'TIME_S_1KI': 0.026193385462211404, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.009458374977, 'W': 65.55} -[39.14, 38.74, 38.55, 38.47, 38.62, 38.75, 38.59, 38.85, 38.96, 38.38, 39.03, 38.81, 38.52, 39.68, 38.42, 38.47, 39.2, 38.75, 38.86, 38.35] -697.69 -34.8845 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 399296, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 7999, 'MATRIX_DENSITY': 7.999e-05, 'TIME_S': 10.458914041519165, 'TIME_S_1KI': 0.026193385462211404, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.009458374977, 'W': 65.55, 'J_1KI': 1.6854901085284526, 'W_1KI': 0.16416392851418496, 'W_D': 30.665499999999994, 'J_D': 314.8462478382587, 'W_D_1KI': 0.07679891609232248, 'J_D_1KI': 0.00019233580124099032} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_0.0001.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_0.0001.json deleted file mode 100644 index 1e3e4a3..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3623, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249858, "MATRIX_DENSITY": 9.999368888888889e-05, "TIME_S": 10.667315244674683, "TIME_S_1KI": 2.944332112800078, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 750.546510848999, "W": 70.88, "J_1KI": 207.16160939801242, "W_1KI": 19.563897322660775, "W_D": 35.933749999999996, "J_D": 380.5015615719556, "W_D_1KI": 
9.918230747998896, "J_D_1KI": 2.737574040297791} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_0.0001.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_0.0001.output deleted file mode 100644 index 6ed7d09..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_0.0001.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249884, "MATRIX_DENSITY": 9.999484444444444e-05, "TIME_S": 2.8976516723632812} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 22, 43, ..., 2249846, - 2249863, 2249884]), - col_indices=tensor([ 2507, 16314, 31317, ..., 120903, 121359, - 147768]), - values=tensor([-0.6085, -0.7004, 0.1228, ..., 0.9020, -0.4601, - -1.0639]), size=(150000, 150000), nnz=2249884, - layout=torch.sparse_csr) -tensor([0.3195, 0.5583, 0.9597, ..., 0.5573, 0.0634, 0.8941]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249884 -Density: 9.999484444444444e-05 -Time: 2.8976516723632812 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3623', '-ss', '150000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249858, "MATRIX_DENSITY": 9.999368888888889e-05, "TIME_S": 10.667315244674683} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 24, ..., 2249832, - 2249846, 2249858]), - col_indices=tensor([ 10890, 12729, 17252, ..., 102978, 126802, - 132653]), - values=tensor([ 1.4097, 0.2679, 0.6261, ..., 1.5911, 1.7075, - -0.0145]), size=(150000, 150000), nnz=2249858, - layout=torch.sparse_csr) -tensor([0.0131, 0.1649, 0.4269, ..., 0.2547, 0.5949, 0.0782]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249858 -Density: 9.999368888888889e-05 -Time: 10.667315244674683 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 24, ..., 2249832, - 2249846, 2249858]), - col_indices=tensor([ 10890, 12729, 17252, ..., 102978, 126802, - 132653]), - values=tensor([ 1.4097, 0.2679, 0.6261, ..., 1.5911, 1.7075, - -0.0145]), size=(150000, 150000), nnz=2249858, - layout=torch.sparse_csr) -tensor([0.0131, 0.1649, 0.4269, ..., 0.2547, 0.5949, 0.0782]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249858 -Density: 9.999368888888889e-05 -Time: 10.667315244674683 seconds - -[39.59, 39.85, 38.38, 38.2, 38.69, 38.44, 38.21, 38.33, 38.22, 38.26] -[70.88] -10.588974475860596 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 2249858, 'MATRIX_DENSITY': 9.999368888888889e-05, 'TIME_S': 10.667315244674683, 'TIME_S_1KI': 2.944332112800078, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 750.546510848999, 'W': 70.88} -[39.59, 39.85, 38.38, 38.2, 38.69, 38.44, 38.21, 38.33, 38.22, 38.26, 39.35, 38.9, 38.85, 38.27, 38.23, 38.22, 38.53, 38.31, 43.38, 38.63] -698.925 -34.94625 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 2249858, 'MATRIX_DENSITY': 9.999368888888889e-05, 'TIME_S': 10.667315244674683, 'TIME_S_1KI': 2.944332112800078, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 750.546510848999, 'W': 70.88, 'J_1KI': 207.16160939801242, 'W_1KI': 19.563897322660775, 'W_D': 35.933749999999996, 'J_D': 380.5015615719556, 'W_D_1KI': 9.918230747998896, 'J_D_1KI': 2.737574040297791} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_1e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_1e-05.json deleted file mode 100644 index 5b12edc..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 9166, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224999, "MATRIX_DENSITY": 9.999955555555555e-06, "TIME_S": 10.329043865203857, "TIME_S_1KI": 1.1268867406942895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 669.3000064229966, "W": 64.62, "J_1KI": 73.01985669026801, "W_1KI": 7.049967270346935, "W_D": 29.8575, "J_D": 309.24829683959484, "W_D_1KI": 3.2574187213615535, "J_D_1KI": 0.35538061546602157} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_1e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_1e-05.output deleted file mode 100644 index e7cd17e..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_1e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224998, "MATRIX_DENSITY": 9.999911111111111e-06, 
"TIME_S": 1.1455013751983643} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 224998, 224998, - 224998]), - col_indices=tensor([100518, 131563, 9790, ..., 76958, 129090, - 127826]), - values=tensor([-0.9354, 0.0861, 0.0469, ..., -0.0733, -0.3369, - -0.3156]), size=(150000, 150000), nnz=224998, - layout=torch.sparse_csr) -tensor([0.7914, 0.1064, 0.9881, ..., 0.4061, 0.8175, 0.2421]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224998 -Density: 9.999911111111111e-06 -Time: 1.1455013751983643 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '9166', '-ss', '150000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224999, "MATRIX_DENSITY": 9.999955555555555e-06, "TIME_S": 10.329043865203857} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 224996, 224996, - 224999]), - col_indices=tensor([101672, 82567, 101421, ..., 14061, 17263, - 44668]), - values=tensor([-1.0159, 1.4417, -1.5888, ..., 0.7553, -0.8014, - -0.0962]), size=(150000, 150000), nnz=224999, - layout=torch.sparse_csr) -tensor([0.8156, 0.5997, 0.6168, ..., 0.9317, 0.7110, 0.6190]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224999 -Density: 9.999955555555555e-06 -Time: 10.329043865203857 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 224996, 224996, - 224999]), - col_indices=tensor([101672, 82567, 101421, ..., 14061, 17263, - 44668]), - values=tensor([-1.0159, 1.4417, -1.5888, ..., 0.7553, -0.8014, - -0.0962]), size=(150000, 150000), nnz=224999, - layout=torch.sparse_csr) -tensor([0.8156, 0.5997, 0.6168, ..., 0.9317, 0.7110, 0.6190]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224999 -Density: 9.999955555555555e-06 -Time: 10.329043865203857 seconds - -[39.27, 38.75, 38.65, 38.37, 38.39, 38.52, 38.44, 38.75, 38.32, 38.42] -[64.62] -10.357474565505981 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 9166, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 224999, 'MATRIX_DENSITY': 9.999955555555555e-06, 'TIME_S': 10.329043865203857, 'TIME_S_1KI': 1.1268867406942895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 669.3000064229966, 'W': 64.62} -[39.27, 38.75, 38.65, 38.37, 38.39, 38.52, 38.44, 38.75, 38.32, 38.42, 39.1, 38.8, 39.12, 38.37, 38.31, 39.57, 38.4, 38.47, 38.33, 38.59] -695.25 -34.7625 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 9166, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 224999, 'MATRIX_DENSITY': 9.999955555555555e-06, 'TIME_S': 10.329043865203857, 'TIME_S_1KI': 1.1268867406942895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 669.3000064229966, 'W': 64.62, 'J_1KI': 73.01985669026801, 'W_1KI': 7.049967270346935, 'W_D': 29.8575, 'J_D': 309.24829683959484, 'W_D_1KI': 3.2574187213615535, 'J_D_1KI': 0.35538061546602157} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_2e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_2e-05.json deleted file mode 100644 index fa55b44..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6901, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449998, "MATRIX_DENSITY": 1.999991111111111e-05, "TIME_S": 10.454267024993896, "TIME_S_1KI": 1.5148916135333859, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 675.2335901641846, "W": 64.84, "J_1KI": 97.845760058569, "W_1KI": 9.39573974786263, "W_D": 29.732999999999997, "J_D": 309.63479852485654, "W_D_1KI": 4.308506013621214, "J_D_1KI": 0.6243306786873227} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_2e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_2e-05.output deleted file mode 100644 index 191d3c9..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_2e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449999, "MATRIX_DENSITY": 1.9999955555555556e-05, "TIME_S": 
1.5213685035705566} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 12, ..., 449995, 449998, - 449999]), - col_indices=tensor([ 8360, 33252, 44362, ..., 21408, 124412, - 124334]), - values=tensor([-0.5427, 0.0515, 0.0818, ..., -1.2949, 1.3790, - 0.6657]), size=(150000, 150000), nnz=449999, - layout=torch.sparse_csr) -tensor([0.2708, 0.0613, 0.1710, ..., 0.9721, 0.9560, 0.0829]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449999 -Density: 1.9999955555555556e-05 -Time: 1.5213685035705566 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6901', '-ss', '150000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449998, "MATRIX_DENSITY": 1.999991111111111e-05, "TIME_S": 10.454267024993896} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 9, ..., 449993, 449994, - 449998]), - col_indices=tensor([ 20846, 26091, 54441, ..., 30732, 48515, - 104522]), - values=tensor([-1.2604, -0.1905, -0.1295, ..., 0.2361, 0.1736, - -0.7596]), size=(150000, 150000), nnz=449998, - layout=torch.sparse_csr) -tensor([0.1571, 0.6178, 0.3753, ..., 0.9438, 0.5462, 0.3709]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449998 -Density: 1.999991111111111e-05 -Time: 10.454267024993896 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 9, ..., 449993, 449994, - 449998]), - col_indices=tensor([ 20846, 26091, 54441, ..., 30732, 48515, - 104522]), - values=tensor([-1.2604, -0.1905, -0.1295, ..., 0.2361, 0.1736, - -0.7596]), size=(150000, 150000), nnz=449998, - layout=torch.sparse_csr) -tensor([0.1571, 0.6178, 0.3753, ..., 0.9438, 0.5462, 0.3709]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449998 -Density: 1.999991111111111e-05 -Time: 10.454267024993896 seconds - -[40.27, 38.44, 38.96, 38.34, 38.54, 38.31, 38.38, 38.3, 38.55, 45.52] -[64.84] -10.413843154907227 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 449998, 'MATRIX_DENSITY': 1.999991111111111e-05, 'TIME_S': 10.454267024993896, 'TIME_S_1KI': 1.5148916135333859, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 675.2335901641846, 'W': 64.84} -[40.27, 38.44, 38.96, 38.34, 38.54, 38.31, 38.38, 38.3, 38.55, 45.52, 39.02, 38.38, 38.38, 38.52, 38.37, 38.29, 43.89, 38.47, 38.4, 38.43] -702.1400000000001 -35.107000000000006 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 449998, 'MATRIX_DENSITY': 1.999991111111111e-05, 'TIME_S': 10.454267024993896, 'TIME_S_1KI': 1.5148916135333859, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 675.2335901641846, 'W': 64.84, 'J_1KI': 97.845760058569, 'W_1KI': 9.39573974786263, 'W_D': 29.732999999999997, 'J_D': 309.63479852485654, 'W_D_1KI': 4.308506013621214, 'J_D_1KI': 0.6243306786873227} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_5e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_5e-05.json deleted file mode 100644 index 0cd4cbd..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 5109, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124975, "MATRIX_DENSITY": 4.999888888888889e-05, "TIME_S": 10.481255531311035, "TIME_S_1KI": 2.051527800217466, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 704.4864772510529, "W": 66.92, "J_1KI": 137.89126585458072, "W_1KI": 13.098453709140731, "W_D": 32.048, "J_D": 337.37870028305053, "W_D_1KI": 6.272851830103739, "J_D_1KI": 1.2278042337255313} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_5e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_5e-05.output deleted file mode 100644 index 5775412..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_5e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124977, "MATRIX_DENSITY": 
4.9998977777777776e-05, "TIME_S": 2.0548505783081055} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 18, ..., 1124965, - 1124969, 1124977]), - col_indices=tensor([ 31453, 47537, 66534, ..., 102759, 106663, - 136823]), - values=tensor([ 1.5868, -0.7934, 0.7941, ..., -1.6063, 2.0651, - 0.5690]), size=(150000, 150000), nnz=1124977, - layout=torch.sparse_csr) -tensor([0.6210, 0.5889, 0.5144, ..., 0.1174, 0.3534, 0.8613]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124977 -Density: 4.9998977777777776e-05 -Time: 2.0548505783081055 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5109', '-ss', '150000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124975, "MATRIX_DENSITY": 4.999888888888889e-05, "TIME_S": 10.481255531311035} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 17, ..., 1124954, - 1124964, 1124975]), - col_indices=tensor([ 13599, 24811, 30718, ..., 135710, 137278, - 148349]), - values=tensor([ 1.1951, -1.4782, 1.4832, ..., 1.3639, 0.0995, - 0.3762]), size=(150000, 150000), nnz=1124975, - layout=torch.sparse_csr) -tensor([0.0815, 0.9569, 0.3513, ..., 0.2811, 0.4692, 0.1935]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124975 -Density: 4.999888888888889e-05 -Time: 10.481255531311035 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 17, ..., 1124954, - 1124964, 1124975]), - col_indices=tensor([ 13599, 24811, 30718, ..., 135710, 137278, - 148349]), - values=tensor([ 1.1951, -1.4782, 1.4832, ..., 1.3639, 0.0995, - 0.3762]), size=(150000, 150000), nnz=1124975, - layout=torch.sparse_csr) -tensor([0.0815, 0.9569, 0.3513, ..., 0.2811, 0.4692, 0.1935]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124975 -Density: 4.999888888888889e-05 -Time: 10.481255531311035 seconds - -[39.89, 38.34, 38.79, 38.32, 38.57, 38.23, 38.56, 38.9, 38.74, 38.66] -[66.92] -10.52729344367981 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 5109, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1124975, 'MATRIX_DENSITY': 4.999888888888889e-05, 'TIME_S': 10.481255531311035, 'TIME_S_1KI': 2.051527800217466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 704.4864772510529, 'W': 66.92} -[39.89, 38.34, 38.79, 38.32, 38.57, 38.23, 38.56, 38.9, 38.74, 38.66, 39.47, 40.06, 38.45, 38.3, 38.41, 39.98, 38.88, 38.28, 38.36, 38.52] -697.44 -34.872 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 5109, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1124975, 'MATRIX_DENSITY': 4.999888888888889e-05, 'TIME_S': 10.481255531311035, 'TIME_S_1KI': 2.051527800217466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 704.4864772510529, 'W': 66.92, 'J_1KI': 137.89126585458072, 'W_1KI': 13.098453709140731, 'W_D': 32.048, 'J_D': 337.37870028305053, 'W_D_1KI': 6.272851830103739, 'J_D_1KI': 1.2278042337255313} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_8e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_8e-05.json deleted file mode 100644 index eab6a26..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 4220, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799931, "MATRIX_DENSITY": 7.999693333333333e-05, "TIME_S": 10.419165134429932, "TIME_S_1KI": 2.4689964773530644, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 692.4653435611725, "W": 67.59, "J_1KI": 164.0913136400883, "W_1KI": 16.016587677725116, "W_D": 32.66975000000001, "J_D": 334.70438907837877, "W_D_1KI": 7.741646919431282, "J_D_1KI": 1.8345134880168914} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_8e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_8e-05.output deleted file mode 100644 index 672156d..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_150000_8e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799939, "MATRIX_DENSITY": 7.999728888888888e-05, "TIME_S": 
2.488084554672241} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 22, ..., 1799913, - 1799924, 1799939]), - col_indices=tensor([ 13746, 15057, 18265, ..., 123846, 124411, - 145916]), - values=tensor([-0.7466, 0.0637, -0.8689, ..., -0.5743, -0.3689, - 0.2622]), size=(150000, 150000), nnz=1799939, - layout=torch.sparse_csr) -tensor([0.9366, 0.5730, 0.1137, ..., 0.8382, 0.9191, 0.9155]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799939 -Density: 7.999728888888888e-05 -Time: 2.488084554672241 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4220', '-ss', '150000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799931, "MATRIX_DENSITY": 7.999693333333333e-05, "TIME_S": 10.419165134429932} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 23, ..., 1799907, - 1799918, 1799931]), - col_indices=tensor([ 5037, 12265, 35290, ..., 122025, 127242, - 133587]), - values=tensor([-0.8165, -0.4506, -1.1214, ..., 0.3012, 0.9164, - 0.9097]), size=(150000, 150000), nnz=1799931, - layout=torch.sparse_csr) -tensor([0.5758, 0.1969, 0.5929, ..., 0.3681, 0.1106, 0.1361]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799931 -Density: 7.999693333333333e-05 -Time: 10.419165134429932 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 23, ..., 1799907, - 1799918, 1799931]), - col_indices=tensor([ 5037, 12265, 35290, ..., 122025, 127242, - 133587]), - values=tensor([-0.8165, -0.4506, -1.1214, ..., 0.3012, 0.9164, - 0.9097]), size=(150000, 150000), nnz=1799931, - layout=torch.sparse_csr) -tensor([0.5758, 0.1969, 0.5929, ..., 0.3681, 0.1106, 0.1361]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799931 -Density: 7.999693333333333e-05 -Time: 10.419165134429932 seconds - -[44.49, 38.32, 38.33, 39.2, 38.32, 39.11, 38.45, 38.58, 38.73, 38.29] -[67.59] -10.245085716247559 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 4220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1799931, 'MATRIX_DENSITY': 7.999693333333333e-05, 'TIME_S': 10.419165134429932, 'TIME_S_1KI': 2.4689964773530644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.4653435611725, 'W': 67.59} -[44.49, 38.32, 38.33, 39.2, 38.32, 39.11, 38.45, 38.58, 38.73, 38.29, 39.26, 38.32, 38.8, 38.76, 38.46, 39.62, 38.37, 38.42, 38.47, 38.25] -698.405 -34.920249999999996 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 4220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1799931, 'MATRIX_DENSITY': 7.999693333333333e-05, 'TIME_S': 10.419165134429932, 'TIME_S_1KI': 2.4689964773530644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.4653435611725, 'W': 67.59, 'J_1KI': 164.0913136400883, 'W_1KI': 16.016587677725116, 'W_D': 32.66975000000001, 'J_D': 334.70438907837877, 'W_D_1KI': 7.741646919431282, 'J_D_1KI': 1.8345134880168914} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_0.0001.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_0.0001.json deleted file mode 100644 index c573f2a..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2126, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999815, "MATRIX_DENSITY": 9.9995375e-05, "TIME_S": 10.061469793319702, "TIME_S_1KI": 4.732582216989512, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 814.6817216920853, "W": 76.98, "J_1KI": 383.19930465290935, "W_1KI": 36.20884289746002, "W_D": 42.339000000000006, "J_D": 448.07494693064695, "W_D_1KI": 19.914863593603013, "J_D_1KI": 9.367292377047512} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_0.0001.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_0.0001.output deleted file mode 100644 index 09b130d..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_0.0001.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999788, "MATRIX_DENSITY": 
9.99947e-05, "TIME_S": 4.937520503997803} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 37, ..., 3999749, - 3999767, 3999788]), - col_indices=tensor([ 1100, 11052, 12103, ..., 167542, 179467, - 199307]), - values=tensor([ 0.5475, -1.1224, 0.0722, ..., -0.1144, 1.0163, - 0.7412]), size=(200000, 200000), nnz=3999788, - layout=torch.sparse_csr) -tensor([0.8210, 0.9181, 0.4540, ..., 0.0365, 0.2540, 0.3511]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999788 -Density: 9.99947e-05 -Time: 4.937520503997803 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2126', '-ss', '200000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999815, "MATRIX_DENSITY": 9.9995375e-05, "TIME_S": 10.061469793319702} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 34, ..., 3999773, - 3999791, 3999815]), - col_indices=tensor([ 25603, 32992, 34253, ..., 185349, 188179, - 193803]), - values=tensor([-0.5584, -0.3177, -0.8346, ..., -0.6017, 0.3720, - 0.8986]), size=(200000, 200000), nnz=3999815, - layout=torch.sparse_csr) -tensor([0.9144, 0.6087, 0.6108, ..., 0.3591, 0.6548, 0.2005]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999815 -Density: 9.9995375e-05 -Time: 10.061469793319702 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 34, ..., 3999773, - 3999791, 3999815]), - col_indices=tensor([ 25603, 32992, 34253, ..., 185349, 188179, - 193803]), - values=tensor([-0.5584, -0.3177, -0.8346, ..., -0.6017, 0.3720, - 0.8986]), size=(200000, 200000), nnz=3999815, - layout=torch.sparse_csr) -tensor([0.9144, 0.6087, 0.6108, ..., 0.3591, 0.6548, 0.2005]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999815 -Density: 9.9995375e-05 -Time: 10.061469793319702 seconds - -[38.95, 38.12, 38.27, 38.18, 38.23, 38.11, 38.27, 39.77, 38.33, 38.11] -[76.98] -10.583030939102173 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2126, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3999815, 'MATRIX_DENSITY': 9.9995375e-05, 'TIME_S': 10.061469793319702, 'TIME_S_1KI': 4.732582216989512, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 814.6817216920853, 'W': 76.98} -[38.95, 38.12, 38.27, 38.18, 38.23, 38.11, 38.27, 39.77, 38.33, 38.11, 39.73, 38.91, 38.21, 38.63, 38.46, 38.62, 38.71, 38.18, 38.28, 38.29] -692.8199999999999 -34.641 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2126, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3999815, 'MATRIX_DENSITY': 9.9995375e-05, 'TIME_S': 10.061469793319702, 'TIME_S_1KI': 4.732582216989512, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 814.6817216920853, 'W': 76.98, 'J_1KI': 383.19930465290935, 'W_1KI': 36.20884289746002, 'W_D': 42.339000000000006, 'J_D': 448.07494693064695, 'W_D_1KI': 19.914863593603013, 'J_D_1KI': 9.367292377047512} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_1e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_1e-05.json deleted file mode 100644 index 532f1a2..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6335, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399999, "MATRIX_DENSITY": 9.999975e-06, "TIME_S": 10.42294692993164, "TIME_S_1KI": 1.6452954901233845, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 677.8716199588775, "W": 64.66, "J_1KI": 107.00420204560024, "W_1KI": 10.20678768745067, "W_D": 29.919749999999993, "J_D": 313.66763688933844, "W_D_1KI": 4.7229281767955795, "J_D_1KI": 0.7455293096757032} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_1e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_1e-05.output deleted file mode 100644 index 32065ad..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_1e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399998, "MATRIX_DENSITY": 9.99995e-06, "TIME_S": 
1.6573054790496826} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 399994, 399997, - 399998]), - col_indices=tensor([ 66300, 2284, 53244, ..., 49679, 62137, - 168627]), - values=tensor([ 0.5044, -0.0503, -2.0900, ..., 0.4461, -0.3815, - -1.5372]), size=(200000, 200000), nnz=399998, - layout=torch.sparse_csr) -tensor([0.1588, 0.0055, 0.7727, ..., 0.8522, 0.5649, 0.6738]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399998 -Density: 9.99995e-06 -Time: 1.6573054790496826 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6335', '-ss', '200000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399999, "MATRIX_DENSITY": 9.999975e-06, "TIME_S": 10.42294692993164} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 399991, 399995, - 399999]), - col_indices=tensor([ 6449, 13437, 68699, ..., 173042, 178967, - 192775]), - values=tensor([ 1.5716, 0.2369, 0.7778, ..., 0.5457, 0.4701, - -0.8057]), size=(200000, 200000), nnz=399999, - layout=torch.sparse_csr) -tensor([0.7762, 0.0703, 0.2592, ..., 0.1464, 0.7439, 0.6172]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399999 -Density: 9.999975e-06 -Time: 10.42294692993164 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 399991, 399995, - 399999]), - col_indices=tensor([ 6449, 13437, 68699, ..., 173042, 178967, - 192775]), - values=tensor([ 1.5716, 0.2369, 0.7778, ..., 0.5457, 0.4701, - -0.8057]), size=(200000, 200000), nnz=399999, - layout=torch.sparse_csr) -tensor([0.7762, 0.0703, 0.2592, ..., 0.1464, 0.7439, 0.6172]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399999 -Density: 9.999975e-06 -Time: 10.42294692993164 seconds - -[39.04, 38.53, 39.14, 38.5, 38.68, 38.37, 38.33, 38.63, 38.3, 40.39] -[64.66] -10.483631610870361 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6335, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 399999, 'MATRIX_DENSITY': 9.999975e-06, 'TIME_S': 10.42294692993164, 'TIME_S_1KI': 1.6452954901233845, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 677.8716199588775, 'W': 64.66} -[39.04, 38.53, 39.14, 38.5, 38.68, 38.37, 38.33, 38.63, 38.3, 40.39, 39.02, 38.55, 38.31, 38.69, 38.65, 38.25, 38.41, 38.23, 38.67, 38.68] -694.8050000000001 -34.74025 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6335, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 399999, 'MATRIX_DENSITY': 9.999975e-06, 'TIME_S': 10.42294692993164, 'TIME_S_1KI': 1.6452954901233845, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 677.8716199588775, 'W': 64.66, 'J_1KI': 107.00420204560024, 'W_1KI': 10.20678768745067, 'W_D': 29.919749999999993, 'J_D': 313.66763688933844, 'W_D_1KI': 4.7229281767955795, 'J_D_1KI': 0.7455293096757032} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_2e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_2e-05.json deleted file mode 100644 index 43dce29..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 4694, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799988, "MATRIX_DENSITY": 1.99997e-05, "TIME_S": 10.433407306671143, "TIME_S_1KI": 2.2227113989499663, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 685.4626195144654, "W": 65.56, "J_1KI": 146.02953121313706, "W_1KI": 13.966766084363018, "W_D": 30.37675, "J_D": 317.60412793374064, "W_D_1KI": 6.471399659139327, "J_D_1KI": 1.3786535277246117} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_2e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_2e-05.output deleted file mode 100644 index 5684182..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_2e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799991, "MATRIX_DENSITY": 1.9999775e-05, "TIME_S": 2.2366011142730713} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 799986, 799987, - 799991]), - col_indices=tensor([150067, 181412, 47629, ..., 70392, 74082, - 103785]), - values=tensor([ 1.4929, -0.1532, -0.9013, ..., -0.6923, 0.5828, - -0.1352]), size=(200000, 200000), nnz=799991, - layout=torch.sparse_csr) -tensor([0.5880, 0.2805, 0.9130, ..., 0.2024, 0.8848, 0.8005]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799991 -Density: 1.9999775e-05 -Time: 2.2366011142730713 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4694', '-ss', '200000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799988, "MATRIX_DENSITY": 1.99997e-05, "TIME_S": 10.433407306671143} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 10, ..., 799981, 799982, - 799988]), - col_indices=tensor([ 26638, 34068, 51464, ..., 90655, 104981, - 178084]), - values=tensor([ 0.5771, -0.4861, -1.4112, ..., 1.0374, 0.9570, - 0.9346]), size=(200000, 200000), nnz=799988, - layout=torch.sparse_csr) -tensor([0.0015, 0.2894, 0.6814, ..., 0.9382, 0.9968, 0.5782]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799988 -Density: 1.99997e-05 -Time: 10.433407306671143 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 10, ..., 799981, 799982, - 799988]), - col_indices=tensor([ 26638, 34068, 51464, ..., 90655, 104981, - 178084]), - values=tensor([ 0.5771, -0.4861, -1.4112, ..., 1.0374, 0.9570, - 0.9346]), size=(200000, 200000), nnz=799988, - layout=torch.sparse_csr) -tensor([0.0015, 0.2894, 0.6814, ..., 0.9382, 0.9968, 0.5782]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799988 -Density: 1.99997e-05 -Time: 10.433407306671143 seconds - -[39.12, 38.28, 38.59, 38.33, 38.43, 38.29, 39.08, 38.65, 39.89, 43.78] -[65.56] -10.455500602722168 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 4694, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 799988, 'MATRIX_DENSITY': 1.99997e-05, 'TIME_S': 10.433407306671143, 'TIME_S_1KI': 2.2227113989499663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 685.4626195144654, 'W': 65.56} -[39.12, 38.28, 38.59, 38.33, 38.43, 38.29, 39.08, 38.65, 39.89, 43.78, 39.0, 38.3, 38.84, 38.7, 38.69, 38.36, 43.77, 38.52, 38.75, 38.49] -703.665 -35.18325 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 4694, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 799988, 'MATRIX_DENSITY': 1.99997e-05, 'TIME_S': 10.433407306671143, 'TIME_S_1KI': 2.2227113989499663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 685.4626195144654, 'W': 65.56, 'J_1KI': 146.02953121313706, 'W_1KI': 13.966766084363018, 'W_D': 30.37675, 'J_D': 317.60412793374064, 'W_D_1KI': 6.471399659139327, 'J_D_1KI': 1.3786535277246117} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_5e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_5e-05.json deleted file mode 100644 index 658fa56..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3293, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999955, "MATRIX_DENSITY": 4.9998875e-05, "TIME_S": 10.427314758300781, "TIME_S_1KI": 3.16650918867318, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 724.648955361843, "W": 69.33, "J_1KI": 220.05738091765656, "W_1KI": 21.053750379593076, "W_D": 34.4645, "J_D": 360.22881756913665, "W_D_1KI": 10.465988460370484, "J_D_1KI": 3.178253404303214} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_5e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_5e-05.output deleted file mode 100644 index db547fa..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_5e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999939, "MATRIX_DENSITY": 4.9998475e-05, "TIME_S": 3.1881461143493652} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 21, ..., 1999920, - 1999927, 1999939]), - col_indices=tensor([ 21664, 28016, 30855, ..., 178410, 188321, - 197716]), - values=tensor([-0.8535, -1.1008, -0.1929, ..., -0.4377, -0.2253, - -1.5714]), size=(200000, 200000), nnz=1999939, - layout=torch.sparse_csr) -tensor([0.6684, 0.0214, 0.6544, ..., 0.8819, 0.1706, 0.8563]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999939 -Density: 4.9998475e-05 -Time: 3.1881461143493652 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3293', '-ss', '200000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999955, "MATRIX_DENSITY": 4.9998875e-05, "TIME_S": 10.427314758300781} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 18, ..., 1999938, - 1999949, 1999955]), - col_indices=tensor([ 20189, 20497, 53226, ..., 105399, 143618, - 172009]), - values=tensor([-0.5680, 0.4071, -0.7459, ..., 0.6726, 0.9697, - 0.1668]), size=(200000, 200000), nnz=1999955, - layout=torch.sparse_csr) -tensor([0.0111, 0.0503, 0.6606, ..., 0.3364, 0.9745, 0.1391]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999955 -Density: 4.9998875e-05 -Time: 10.427314758300781 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 18, ..., 1999938, - 1999949, 1999955]), - col_indices=tensor([ 20189, 20497, 53226, ..., 105399, 143618, - 172009]), - values=tensor([-0.5680, 0.4071, -0.7459, ..., 0.6726, 0.9697, - 0.1668]), size=(200000, 200000), nnz=1999955, - layout=torch.sparse_csr) -tensor([0.0111, 0.0503, 0.6606, ..., 0.3364, 0.9745, 0.1391]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999955 -Density: 4.9998875e-05 -Time: 10.427314758300781 seconds - -[39.96, 38.8, 38.3, 38.76, 38.9, 38.39, 38.83, 38.3, 38.63, 38.25] -[69.33] -10.452170133590698 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3293, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 1999955, 'MATRIX_DENSITY': 4.9998875e-05, 'TIME_S': 10.427314758300781, 'TIME_S_1KI': 3.16650918867318, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 724.648955361843, 'W': 69.33} -[39.96, 38.8, 38.3, 38.76, 38.9, 38.39, 38.83, 38.3, 38.63, 38.25, 39.14, 38.68, 38.82, 39.03, 38.72, 38.25, 39.51, 38.98, 38.47, 38.53] -697.31 -34.8655 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3293, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 1999955, 'MATRIX_DENSITY': 4.9998875e-05, 'TIME_S': 10.427314758300781, 'TIME_S_1KI': 3.16650918867318, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 724.648955361843, 'W': 69.33, 'J_1KI': 220.05738091765656, 'W_1KI': 21.053750379593076, 'W_D': 34.4645, 'J_D': 360.22881756913665, 'W_D_1KI': 10.465988460370484, 'J_D_1KI': 3.178253404303214} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_8e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_8e-05.json deleted file mode 100644 index 8f8c413..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2490, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199899, "MATRIX_DENSITY": 7.9997475e-05, "TIME_S": 10.601098775863647, "TIME_S_1KI": 4.257469387897046, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 795.9878641939164, "W": 76.09, "J_1KI": 319.67384104173345, "W_1KI": 30.55823293172691, "W_D": 41.107, "J_D": 430.02593157339095, "W_D_1KI": 16.50883534136546, "J_D_1KI": 6.630054353962033} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_8e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_8e-05.output deleted file mode 100644 index 4e21bc0..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_200000_8e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199878, "MATRIX_DENSITY": 7.999695e-05, "TIME_S": 4.216526031494141} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 25, ..., 3199848, - 3199865, 3199878]), - col_indices=tensor([ 1190, 4142, 36852, ..., 152325, 165332, - 197913]), - values=tensor([-1.1974, -1.6535, -0.6800, ..., 0.1845, 0.8814, - 1.8310]), size=(200000, 200000), nnz=3199878, - layout=torch.sparse_csr) -tensor([0.6219, 0.5921, 0.3977, ..., 0.8913, 0.7959, 0.8662]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199878 -Density: 7.999695e-05 -Time: 4.216526031494141 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2490', '-ss', '200000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199899, "MATRIX_DENSITY": 7.9997475e-05, "TIME_S": 10.601098775863647} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 25, 41, ..., 3199869, - 3199882, 3199899]), - col_indices=tensor([ 1990, 4370, 9639, ..., 160214, 168732, - 178999]), - values=tensor([ 0.5183, -0.4982, -0.8462, ..., -0.9582, 1.1229, - -0.5337]), size=(200000, 200000), nnz=3199899, - layout=torch.sparse_csr) -tensor([0.7497, 0.2196, 0.7439, ..., 0.7006, 0.8017, 0.8731]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199899 -Density: 7.9997475e-05 -Time: 10.601098775863647 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 25, 41, ..., 3199869, - 3199882, 3199899]), - col_indices=tensor([ 1990, 4370, 9639, ..., 160214, 168732, - 178999]), - values=tensor([ 0.5183, -0.4982, -0.8462, ..., -0.9582, 1.1229, - -0.5337]), size=(200000, 200000), nnz=3199899, - layout=torch.sparse_csr) -tensor([0.7497, 0.2196, 0.7439, ..., 0.7006, 0.8017, 0.8731]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199899 -Density: 7.9997475e-05 -Time: 10.601098775863647 seconds - -[38.96, 38.63, 38.71, 43.66, 38.88, 38.56, 38.48, 38.23, 38.62, 38.27] -[76.09] -10.46113634109497 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2490, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3199899, 'MATRIX_DENSITY': 7.9997475e-05, 'TIME_S': 10.601098775863647, 'TIME_S_1KI': 4.257469387897046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 795.9878641939164, 'W': 76.09} -[38.96, 38.63, 38.71, 43.66, 38.88, 38.56, 38.48, 38.23, 38.62, 38.27, 39.83, 38.65, 38.85, 38.23, 38.32, 38.69, 38.39, 38.19, 38.76, 38.56] -699.6600000000001 -34.983000000000004 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2490, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3199899, 'MATRIX_DENSITY': 7.9997475e-05, 'TIME_S': 10.601098775863647, 'TIME_S_1KI': 4.257469387897046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 795.9878641939164, 'W': 76.09, 'J_1KI': 319.67384104173345, 'W_1KI': 30.55823293172691, 'W_D': 41.107, 'J_D': 430.02593157339095, 'W_D_1KI': 16.50883534136546, 'J_D_1KI': 6.630054353962033} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_0.0001.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_0.0001.json deleted file mode 100644 index 8e747f4..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 75516, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39996, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 10.605977296829224, "TIME_S_1KI": 0.1404467569366654, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 663.8375467443466, "W": 64.41, "J_1KI": 8.790687360881755, "W_1KI": 0.8529318290163673, "W_D": 29.4705, "J_D": 303.73582396101955, "W_D_1KI": 0.3902550452884157, "J_D_1KI": 0.005167845824572484} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_0.0001.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_0.0001.output deleted file mode 100644 index c19663c..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_0.0001.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39999, "MATRIX_DENSITY": 9.99975e-05, "TIME_S": 0.14612913131713867} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 39997, 39997, 39999]), - col_indices=tensor([ 3326, 18555, 4228, ..., 17664, 6419, 7917]), - values=tensor([-1.0470, 0.5823, 1.1689, ..., -0.2173, 0.5375, - -0.3767]), size=(20000, 20000), nnz=39999, - layout=torch.sparse_csr) -tensor([0.0681, 0.4704, 0.0398, ..., 0.0951, 0.7480, 0.6593]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39999 -Density: 9.99975e-05 -Time: 0.14612913131713867 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '71854', '-ss', '20000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39997, "MATRIX_DENSITY": 9.99925e-05, "TIME_S": 9.99073576927185} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 39992, 39996, 39997]), - col_indices=tensor([ 552, 2441, 13958, ..., 14462, 15494, 895]), - values=tensor([-0.5868, -1.3676, 0.5364, ..., -1.1498, -2.3092, - 0.8161]), size=(20000, 20000), nnz=39997, - layout=torch.sparse_csr) -tensor([0.8473, 0.0908, 0.2782, ..., 0.6731, 0.4199, 0.6466]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39997 -Density: 9.99925e-05 -Time: 9.99073576927185 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75516', '-ss', '20000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39996, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 10.605977296829224} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 39990, 39992, 39996]), - col_indices=tensor([ 2547, 15863, 17237, ..., 16048, 17814, 18213]), - values=tensor([ 0.6167, -0.9603, -1.8975, ..., -1.4286, -2.6420, - -0.8475]), size=(20000, 20000), nnz=39996, - layout=torch.sparse_csr) -tensor([0.0665, 0.2382, 0.8503, ..., 0.0677, 0.4247, 0.7998]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39996 -Density: 9.999e-05 -Time: 10.605977296829224 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 39990, 39992, 39996]), - col_indices=tensor([ 2547, 15863, 17237, ..., 16048, 17814, 18213]), - values=tensor([ 0.6167, -0.9603, -1.8975, ..., -1.4286, -2.6420, - -0.8475]), size=(20000, 20000), nnz=39996, - layout=torch.sparse_csr) -tensor([0.0665, 0.2382, 0.8503, ..., 0.0677, 0.4247, 0.7998]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39996 -Density: 9.999e-05 -Time: 10.605977296829224 seconds - -[44.4, 38.48, 38.37, 38.8, 38.62, 38.64, 38.57, 38.61, 39.43, 38.56] -[64.41] -10.30643606185913 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 39996, 'MATRIX_DENSITY': 9.999e-05, 'TIME_S': 10.605977296829224, 'TIME_S_1KI': 0.1404467569366654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.8375467443466, 'W': 64.41} -[44.4, 38.48, 38.37, 38.8, 38.62, 38.64, 38.57, 38.61, 39.43, 38.56, 39.45, 38.71, 38.43, 39.06, 38.84, 38.68, 38.41, 38.39, 38.33, 38.43] -698.79 -34.939499999999995 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 39996, 'MATRIX_DENSITY': 9.999e-05, 'TIME_S': 10.605977296829224, 'TIME_S_1KI': 0.1404467569366654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.8375467443466, 'W': 64.41, 'J_1KI': 8.790687360881755, 'W_1KI': 0.8529318290163673, 'W_D': 29.4705, 'J_D': 303.73582396101955, 'W_D_1KI': 0.3902550452884157, 'J_D_1KI': 0.005167845824572484} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_1e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_1e-05.json deleted file mode 100644 index 3c9d499..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 339949, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.276112794876099, "TIME_S_1KI": 0.030228395420713396, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 694.2471476650238, "W": 67.16, "J_1KI": 2.042209706941405, "W_1KI": 0.1975590456215491, "W_D": 24.180500000000002, "J_D": 249.95895107376577, "W_D_1KI": 
0.0711297871151261, "J_D_1KI": 0.0002092366417172167} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_1e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_1e-05.output deleted file mode 100644 index 278b7fb..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_1e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0411067008972168} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([ 5347, 5696, 11766, ..., 4248, 13946, 5573]), - values=tensor([-0.4980, -1.3505, -0.8938, ..., 0.5986, 0.0338, - 1.0931]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.9429, 0.2794, 0.2982, ..., 0.4497, 0.8504, 0.9047]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 0.0411067008972168 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '255432', '-ss', '20000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.889511346817017} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([13641, 18974, 10301, ..., 1121, 3566, 13234]), - values=tensor([ 0.6988, 1.0871, -3.1801, ..., 2.5854, 0.7153, - 0.8838]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.0618, 0.7127, 0.3136, ..., 0.2800, 0.2973, 0.5498]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 7.889511346817017 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '339949', '-ss', '20000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.276112794876099} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 4000, 4000, 4000]), - col_indices=tensor([ 1046, 3721, 2827, ..., 8777, 4353, 16975]), - values=tensor([-1.9029, 0.7368, -1.0539, ..., 1.1041, -0.4381, - 0.2593]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.3506, 0.0236, 0.5489, ..., 0.0225, 0.5979, 0.0151]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.276112794876099 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 4000, 4000, 4000]), - col_indices=tensor([ 1046, 3721, 2827, ..., 8777, 4353, 16975]), - values=tensor([-1.9029, 0.7368, -1.0539, ..., 1.1041, -0.4381, - 0.2593]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.3506, 0.0236, 0.5489, ..., 0.0225, 0.5979, 0.0151]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.276112794876099 seconds - -[39.88, 38.89, 39.23, 38.46, 38.61, 38.4, 38.36, 38.82, 38.6, 40.13] -[67.16] -10.337211847305298 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 339949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 4000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.276112794876099, 'TIME_S_1KI': 0.030228395420713396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 694.2471476650238, 'W': 67.16} -[39.88, 38.89, 39.23, 38.46, 38.61, 38.4, 38.36, 38.82, 38.6, 40.13, 50.42, 38.65, 38.77, 39.82, 63.51, 65.36, 67.78, 69.28, 68.99, 65.69] -859.5899999999999 -42.979499999999994 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 339949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 4000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.276112794876099, 'TIME_S_1KI': 0.030228395420713396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 694.2471476650238, 'W': 67.16, 'J_1KI': 2.042209706941405, 'W_1KI': 0.1975590456215491, 'W_D': 24.180500000000002, 'J_D': 249.95895107376577, 'W_D_1KI': 0.0711297871151261, 'J_D_1KI': 0.0002092366417172167} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_2e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_2e-05.json deleted file mode 100644 index 411e200..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 285548, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 7999, "MATRIX_DENSITY": 1.99975e-05, "TIME_S": 10.352515459060669, "TIME_S_1KI": 0.036254904461108704, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 675.8093022727966, "W": 65.57, "J_1KI": 2.36670998316499, "W_1KI": 0.22962864387073276, "W_D": 29.88799999999999, "J_D": 308.0461861572265, "W_D_1KI": 0.10466891730987432, "J_D_1KI": 0.0003665545453299421} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_2e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_2e-05.output deleted file mode 100644 index 152508e..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_2e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 0.04795694351196289} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 8000, 8000, 8000]), - col_indices=tensor([ 1931, 15671, 551, ..., 10824, 602, 8365]), - values=tensor([-0.3071, -0.8361, 0.5279, ..., 0.1316, -0.7746, - 0.2897]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.4457, 0.6923, 0.1579, ..., 0.1463, 0.6374, 0.7419]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 0.04795694351196289 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '218946', '-ss', '20000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 8.050928354263306} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7999, 8000, 8000]), - col_indices=tensor([ 250, 19583, 11201, ..., 12881, 11624, 15255]), - values=tensor([-0.1348, 0.3885, 0.0508, ..., -0.1074, -2.1160, - -0.8991]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.9201, 0.0390, 0.9857, ..., 0.9043, 0.0958, 0.9692]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 8.050928354263306 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '285548', '-ss', '20000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 7999, "MATRIX_DENSITY": 1.99975e-05, "TIME_S": 10.352515459060669} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7997, 7999, 7999]), - col_indices=tensor([13310, 16575, 4046, ..., 16030, 5973, 13569]), - values=tensor([-1.2087, -0.7501, 0.7250, ..., 0.6657, 1.0228, - -1.2445]), size=(20000, 20000), nnz=7999, - layout=torch.sparse_csr) -tensor([0.1578, 0.4087, 0.7151, ..., 0.9314, 0.3311, 0.2040]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 7999 -Density: 1.99975e-05 -Time: 10.352515459060669 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7997, 7999, 7999]), - col_indices=tensor([13310, 16575, 4046, ..., 16030, 5973, 13569]), - values=tensor([-1.2087, -0.7501, 0.7250, ..., 0.6657, 1.0228, - -1.2445]), size=(20000, 20000), nnz=7999, - layout=torch.sparse_csr) -tensor([0.1578, 0.4087, 0.7151, ..., 0.9314, 0.3311, 0.2040]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 7999 -Density: 1.99975e-05 -Time: 10.352515459060669 seconds - -[39.02, 38.64, 38.39, 38.55, 38.86, 54.26, 38.47, 38.79, 38.47, 38.32] -[65.57] -10.306684494018555 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 285548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 7999, 'MATRIX_DENSITY': 1.99975e-05, 'TIME_S': 10.352515459060669, 'TIME_S_1KI': 0.036254904461108704, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 675.8093022727966, 'W': 65.57} -[39.02, 38.64, 38.39, 38.55, 38.86, 54.26, 38.47, 38.79, 38.47, 38.32, 40.89, 38.31, 39.01, 38.42, 38.75, 40.61, 38.36, 38.75, 38.61, 38.55] -713.64 -35.682 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 285548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 7999, 'MATRIX_DENSITY': 1.99975e-05, 'TIME_S': 10.352515459060669, 'TIME_S_1KI': 0.036254904461108704, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 675.8093022727966, 'W': 65.57, 'J_1KI': 2.36670998316499, 'W_1KI': 0.22962864387073276, 'W_D': 29.88799999999999, 'J_D': 308.0461861572265, 'W_D_1KI': 0.10466891730987432, 'J_D_1KI': 0.0003665545453299421} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_5e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_5e-05.json deleted file mode 100644 index cdf7147..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 97592, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.165460586547852, "TIME_S_1KI": 0.10416284722669739, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 659.6195613670349, "W": 64.08, "J_1KI": 6.758951157544009, "W_1KI": 0.6566111976391507, "W_D": 29.14, "J_D": 299.95808393001556, "W_D_1KI": 0.29859004836462005, "J_D_1KI": 0.0030595750508711785} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_5e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_5e-05.output deleted file mode 100644 index 71c2f2c..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_5e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.1075906753540039} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 19998, 20000, 20000]), - col_indices=tensor([17710, 7536, 5172, ..., 5428, 6699, 9848]), - values=tensor([-0.8289, -1.4184, 1.0749, ..., -0.2236, 0.1687, - 1.2650]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.0992, 0.9347, 0.7971, ..., 0.8604, 0.9782, 0.4441]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 0.1075906753540039 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '97592', '-ss', '20000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.165460586547852} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 19997, 19999, 20000]), - col_indices=tensor([8238, 5335, 5770, ..., 311, 3957, 400]), - values=tensor([-0.8346, -0.7266, -0.0442, ..., -0.0880, -0.5661, - 0.1738]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.8819, 0.4818, 0.1709, ..., 0.7772, 0.9887, 0.3265]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 10.165460586547852 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 19997, 19999, 20000]), - col_indices=tensor([8238, 5335, 5770, ..., 311, 3957, 400]), - values=tensor([-0.8346, -0.7266, -0.0442, ..., -0.0880, -0.5661, - 0.1738]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.8819, 0.4818, 0.1709, ..., 0.7772, 0.9887, 0.3265]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 10.165460586547852 seconds - -[38.87, 43.95, 38.95, 38.61, 38.35, 38.21, 38.32, 38.56, 38.25, 38.88] -[64.08] -10.293688535690308 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 97592, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 20000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.165460586547852, 'TIME_S_1KI': 0.10416284722669739, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 659.6195613670349, 'W': 64.08} -[38.87, 43.95, 38.95, 38.61, 38.35, 38.21, 38.32, 38.56, 38.25, 38.88, 38.93, 38.39, 38.67, 38.17, 38.71, 38.29, 38.27, 38.8, 38.68, 38.56] -698.8 -34.94 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 97592, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 20000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.165460586547852, 'TIME_S_1KI': 0.10416284722669739, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 659.6195613670349, 'W': 64.08, 'J_1KI': 6.758951157544009, 'W_1KI': 0.6566111976391507, 'W_D': 29.14, 'J_D': 299.95808393001556, 'W_D_1KI': 0.29859004836462005, 'J_D_1KI': 0.0030595750508711785} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_8e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_8e-05.json deleted file mode 100644 index cb5f2f7..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 81282, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31999, "MATRIX_DENSITY": 7.99975e-05, "TIME_S": 10.148210287094116, "TIME_S_1KI": 0.12485187725565458, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 676.805806517601, "W": 64.5, "J_1KI": 8.326638204246954, "W_1KI": 0.7935336236805197, "W_D": 29.411749999999998, "J_D": 308.6208244937062, "W_D_1KI": 0.3618482566865972, "J_D_1KI": 0.004451763695364252} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_8e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_8e-05.output deleted file mode 100644 index c4de927..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_20000_8e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31999, "MATRIX_DENSITY": 7.99975e-05, "TIME_S": 0.13827872276306152} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 31995, 31996, 31999]), - col_indices=tensor([ 8349, 13931, 16016, ..., 3726, 5541, 9337]), - values=tensor([ 0.7226, 1.2585, -0.5085, ..., 1.1966, -0.7230, - 1.5919]), size=(20000, 20000), nnz=31999, - layout=torch.sparse_csr) -tensor([0.0699, 0.2259, 0.1321, ..., 0.9218, 0.5118, 0.0418]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31999 -Density: 7.99975e-05 -Time: 0.13827872276306152 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75933', '-ss', '20000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31998, "MATRIX_DENSITY": 7.9995e-05, "TIME_S": 9.80895185470581} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 31997, 31998, 31998]), - col_indices=tensor([ 2304, 4849, 10859, ..., 15960, 19963, 4761]), - values=tensor([ 0.2492, -0.8893, -2.1312, ..., 0.8827, 1.1257, - 0.6256]), size=(20000, 20000), nnz=31998, - layout=torch.sparse_csr) -tensor([0.5188, 0.1332, 0.6896, ..., 0.0693, 0.0565, 0.0574]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31998 -Density: 7.9995e-05 -Time: 9.80895185470581 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '81282', '-ss', '20000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31999, "MATRIX_DENSITY": 7.99975e-05, "TIME_S": 10.148210287094116} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 31995, 31996, 31999]), - col_indices=tensor([11703, 5310, 7783, ..., 268, 15130, 18328]), - values=tensor([ 1.6320, -1.8080, -0.8820, ..., -0.6415, -2.1078, - 0.9447]), size=(20000, 20000), nnz=31999, - layout=torch.sparse_csr) -tensor([0.2832, 0.3016, 0.8566, ..., 0.4433, 0.2436, 0.1599]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31999 -Density: 7.99975e-05 -Time: 10.148210287094116 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 31995, 31996, 31999]), - col_indices=tensor([11703, 5310, 7783, ..., 268, 15130, 18328]), - values=tensor([ 1.6320, -1.8080, -0.8820, ..., -0.6415, -2.1078, - 0.9447]), size=(20000, 20000), nnz=31999, - layout=torch.sparse_csr) -tensor([0.2832, 0.3016, 0.8566, ..., 0.4433, 0.2436, 0.1599]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31999 -Density: 7.99975e-05 -Time: 10.148210287094116 seconds - -[39.56, 38.34, 38.99, 38.37, 38.37, 38.33, 38.45, 38.75, 38.77, 42.23] -[64.5] -10.493113279342651 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 81282, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 31999, 'MATRIX_DENSITY': 7.99975e-05, 'TIME_S': 10.148210287094116, 'TIME_S_1KI': 0.12485187725565458, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 676.805806517601, 'W': 64.5} -[39.56, 38.34, 38.99, 38.37, 38.37, 38.33, 38.45, 38.75, 38.77, 42.23, 38.92, 38.7, 38.58, 38.17, 38.26, 38.26, 43.19, 40.34, 38.32, 38.44] -701.7650000000001 -35.08825 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 81282, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 31999, 'MATRIX_DENSITY': 7.99975e-05, 'TIME_S': 10.148210287094116, 'TIME_S_1KI': 0.12485187725565458, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 676.805806517601, 'W': 64.5, 'J_1KI': 8.326638204246954, 'W_1KI': 0.7935336236805197, 'W_D': 29.411749999999998, 'J_D': 308.6208244937062, 'W_D_1KI': 0.3618482566865972, 'J_D_1KI': 0.004451763695364252} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_0.0001.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_0.0001.json deleted file mode 100644 index 1594935..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 19706, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249985, "MATRIX_DENSITY": 9.9994e-05, "TIME_S": 10.285418033599854, "TIME_S_1KI": 0.5219434706992719, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.8808571100235, "W": 65.5, "J_1KI": 34.19673485791249, "W_1KI": 3.323860753070131, "W_D": 30.33149999999999, "J_D": 312.0582781287431, "W_D_1KI": 1.5392012584999488, "J_D_1KI": 0.07810825426265852} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_0.0001.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_0.0001.output deleted file mode 100644 index 5b6d8f3..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_0.0001.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249988, "MATRIX_DENSITY": 9.99952e-05, "TIME_S": 0.5328168869018555} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 10, ..., 249977, 249984, - 249988]), - col_indices=tensor([ 6276, 18097, 43338, ..., 28082, 42161, 47884]), - values=tensor([-1.5310, 1.3461, 0.0305, ..., 0.3362, 0.2100, - 0.9366]), size=(50000, 50000), nnz=249988, - layout=torch.sparse_csr) -tensor([0.7132, 0.2461, 0.0956, ..., 0.1247, 0.5632, 0.1025]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249988 -Density: 9.99952e-05 -Time: 0.5328168869018555 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19706', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249985, "MATRIX_DENSITY": 9.9994e-05, "TIME_S": 10.285418033599854} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 9, ..., 249975, 249979, - 249985]), - col_indices=tensor([ 2112, 5845, 8001, ..., 35290, 44333, 46875]), - values=tensor([ 1.0354, 0.0467, 0.1747, ..., -1.3076, -0.3502, - -0.0613]), size=(50000, 50000), nnz=249985, - layout=torch.sparse_csr) -tensor([0.1799, 0.0246, 0.3504, ..., 0.8483, 0.2843, 0.7035]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249985 -Density: 9.9994e-05 -Time: 10.285418033599854 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 9, ..., 249975, 249979, - 249985]), - col_indices=tensor([ 2112, 5845, 8001, ..., 35290, 44333, 46875]), - values=tensor([ 1.0354, 0.0467, 0.1747, ..., -1.3076, -0.3502, - -0.0613]), size=(50000, 50000), nnz=249985, - layout=torch.sparse_csr) -tensor([0.1799, 0.0246, 0.3504, ..., 0.8483, 0.2843, 0.7035]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249985 -Density: 9.9994e-05 -Time: 10.285418033599854 seconds - -[39.42, 45.99, 38.94, 38.27, 38.64, 38.29, 38.37, 38.7, 38.39, 39.49] -[65.5] -10.288257360458374 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 19706, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 249985, 'MATRIX_DENSITY': 9.9994e-05, 'TIME_S': 10.285418033599854, 'TIME_S_1KI': 0.5219434706992719, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.8808571100235, 'W': 65.5} -[39.42, 45.99, 38.94, 38.27, 38.64, 38.29, 38.37, 38.7, 38.39, 39.49, 39.4, 38.47, 38.37, 38.55, 38.92, 38.3, 38.84, 38.96, 38.76, 38.91] -703.3700000000001 -35.16850000000001 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 19706, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 249985, 'MATRIX_DENSITY': 9.9994e-05, 'TIME_S': 10.285418033599854, 'TIME_S_1KI': 0.5219434706992719, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.8808571100235, 'W': 65.5, 'J_1KI': 34.19673485791249, 'W_1KI': 3.323860753070131, 'W_D': 30.33149999999999, 'J_D': 312.0582781287431, 'W_D_1KI': 1.5392012584999488, 'J_D_1KI': 0.07810825426265852} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_1e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_1e-05.json deleted file mode 100644 index 135fb86..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 44050, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.054011583328247, "TIME_S_1KI": 0.2282408985999602, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 667.1566281318665, "W": 64.12, "J_1KI": 15.14543991218766, "W_1KI": 1.4556186152099888, "W_D": 29.378, "J_D": 305.67260482311247, "W_D_1KI": 0.6669239500567536, "J_D_1KI": 0.015140157776543784} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_1e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_1e-05.output deleted file mode 100644 index 99f78d4..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_1e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.23836493492126465} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24998, 25000]), - col_indices=tensor([46125, 4742, 9904, ..., 25599, 29949, 38716]), - values=tensor([ 1.3695, 1.6391, -0.4051, ..., 1.6329, -1.1834, - 0.5969]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.6342, 0.2900, 0.2358, ..., 0.4034, 0.6196, 0.0492]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 0.23836493492126465 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '44050', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.054011583328247} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 24999, 24999, 25000]), - col_indices=tensor([21698, 44610, 15464, ..., 28037, 35532, 2783]), - values=tensor([-0.1119, 2.1481, 0.0145, ..., -1.3653, -1.0049, - 0.1748]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.4466, 0.9615, 0.6432, ..., 0.6668, 0.1662, 0.0530]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.054011583328247 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 24999, 24999, 25000]), - col_indices=tensor([21698, 44610, 15464, ..., 28037, 35532, 2783]), - values=tensor([-0.1119, 2.1481, 0.0145, ..., -1.3653, -1.0049, - 0.1748]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.4466, 0.9615, 0.6432, ..., 0.6668, 0.1662, 0.0530]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.054011583328247 seconds - -[39.41, 38.43, 38.73, 38.26, 38.94, 38.75, 38.37, 38.26, 38.36, 38.25] -[64.12] -10.404813289642334 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 44050, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.054011583328247, 'TIME_S_1KI': 0.2282408985999602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 667.1566281318665, 'W': 64.12} -[39.41, 38.43, 38.73, 38.26, 38.94, 38.75, 38.37, 38.26, 38.36, 38.25, 38.92, 39.13, 38.74, 38.35, 38.34, 38.26, 38.31, 38.44, 39.45, 38.86] -694.8400000000001 -34.742000000000004 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 44050, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.054011583328247, 'TIME_S_1KI': 0.2282408985999602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 667.1566281318665, 'W': 64.12, 'J_1KI': 15.14543991218766, 'W_1KI': 1.4556186152099888, 'W_D': 29.378, 'J_D': 305.67260482311247, 'W_D_1KI': 0.6669239500567536, 'J_D_1KI': 0.015140157776543784} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_2e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_2e-05.json deleted file mode 100644 index 5120a42..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 31436, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 49999, "MATRIX_DENSITY": 1.99996e-05, "TIME_S": 10.264468908309937, "TIME_S_1KI": 0.32651956064098286, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 653.915125644207, "W": 63.85, "J_1KI": 20.80147364945308, "W_1KI": 2.0311108283496626, "W_D": 29.1025, "J_D": 298.0511345976591, "W_D_1KI": 0.9257698180430081, "J_D_1KI": 0.02944935163643619} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_2e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_2e-05.output deleted file mode 100644 index 8068e2e..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_2e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 49999, "MATRIX_DENSITY": 1.99996e-05, "TIME_S": 0.33400678634643555} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 49994, 49997, 49999]), - col_indices=tensor([22258, 44811, 31827, ..., 38152, 10862, 23417]), - values=tensor([ 1.0033, 1.7002, 1.1808, ..., -2.4379, -0.5001, - 0.5794]), size=(50000, 50000), nnz=49999, - layout=torch.sparse_csr) -tensor([0.6113, 0.4763, 0.7825, ..., 0.8113, 0.0502, 0.7927]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 49999 -Density: 1.99996e-05 -Time: 0.33400678634643555 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '31436', '-ss', '50000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 49999, "MATRIX_DENSITY": 1.99996e-05, "TIME_S": 10.264468908309937} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 49993, 49996, 49999]), - col_indices=tensor([11563, 44328, 12288, ..., 6450, 22217, 46108]), - values=tensor([ 0.9947, -1.4485, -0.9089, ..., -1.2359, 1.3525, - -2.1317]), size=(50000, 50000), nnz=49999, - layout=torch.sparse_csr) -tensor([0.5008, 0.6261, 0.6340, ..., 0.0882, 0.7441, 0.7817]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 49999 -Density: 1.99996e-05 -Time: 10.264468908309937 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 49993, 49996, 49999]), - col_indices=tensor([11563, 44328, 12288, ..., 6450, 22217, 46108]), - values=tensor([ 0.9947, -1.4485, -0.9089, ..., -1.2359, 1.3525, - -2.1317]), size=(50000, 50000), nnz=49999, - layout=torch.sparse_csr) -tensor([0.5008, 0.6261, 0.6340, ..., 0.0882, 0.7441, 0.7817]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 49999 -Density: 1.99996e-05 -Time: 10.264468908309937 seconds - -[39.17, 38.32, 38.45, 38.27, 38.42, 38.57, 38.96, 38.64, 38.39, 38.29] -[63.85] -10.241427183151245 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 31436, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 49999, 'MATRIX_DENSITY': 1.99996e-05, 'TIME_S': 10.264468908309937, 'TIME_S_1KI': 0.32651956064098286, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 653.915125644207, 'W': 63.85} -[39.17, 38.32, 38.45, 38.27, 38.42, 38.57, 38.96, 38.64, 38.39, 38.29, 39.07, 38.28, 39.02, 38.36, 38.78, 38.83, 39.05, 38.81, 38.36, 38.35] -694.95 -34.7475 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 31436, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 49999, 'MATRIX_DENSITY': 1.99996e-05, 'TIME_S': 10.264468908309937, 'TIME_S_1KI': 0.32651956064098286, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 653.915125644207, 'W': 63.85, 'J_1KI': 20.80147364945308, 'W_1KI': 2.0311108283496626, 'W_D': 29.1025, 'J_D': 298.0511345976591, 'W_D_1KI': 0.9257698180430081, 'J_D_1KI': 0.02944935163643619} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_5e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_5e-05.json deleted file mode 100644 index 1579310..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 23744, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124997, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.221967697143555, "TIME_S_1KI": 0.43050739964384915, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 663.2418496704101, "W": 64.48, "J_1KI": 27.933029383019296, "W_1KI": 2.715633423180593, "W_D": 29.16225, "J_D": 299.96316114377976, "W_D_1KI": 1.2281944912398923, "J_D_1KI": 0.051726520015157186} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_5e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_5e-05.output deleted file mode 100644 index 1a42eae..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_5e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124995, "MATRIX_DENSITY": 4.9998e-05, "TIME_S": 0.44220972061157227} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 124987, 124991, - 124995]), - col_indices=tensor([25991, 265, 12326, ..., 17738, 21897, 36250]), - values=tensor([-1.7726, 2.2930, -0.5068, ..., -0.8419, -1.0508, - 1.1507]), size=(50000, 50000), nnz=124995, - layout=torch.sparse_csr) -tensor([0.7247, 0.1934, 0.1552, ..., 0.2687, 0.5776, 0.7274]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124995 -Density: 4.9998e-05 -Time: 0.44220972061157227 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '23744', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124997, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.221967697143555} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 124994, 124996, - 124997]), - col_indices=tensor([ 3446, 16057, 18839, ..., 24419, 32624, 9367]), - values=tensor([-1.0215, -0.0710, 1.0183, ..., 1.0194, -2.7113, - -0.1037]), size=(50000, 50000), nnz=124997, - layout=torch.sparse_csr) -tensor([0.6795, 0.2671, 0.4716, ..., 0.3391, 0.3249, 0.2747]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124997 -Density: 4.99988e-05 -Time: 10.221967697143555 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 124994, 124996, - 124997]), - col_indices=tensor([ 3446, 16057, 18839, ..., 24419, 32624, 9367]), - values=tensor([-1.0215, -0.0710, 1.0183, ..., 1.0194, -2.7113, - -0.1037]), size=(50000, 50000), nnz=124997, - layout=torch.sparse_csr) -tensor([0.6795, 0.2671, 0.4716, ..., 0.3391, 0.3249, 0.2747]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124997 -Density: 4.99988e-05 -Time: 10.221967697143555 seconds - -[40.28, 38.29, 38.36, 38.74, 38.77, 38.66, 38.31, 41.27, 40.89, 38.71] -[64.48] -10.286008834838867 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 23744, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 124997, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.221967697143555, 'TIME_S_1KI': 0.43050739964384915, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.2418496704101, 'W': 64.48} -[40.28, 38.29, 38.36, 38.74, 38.77, 38.66, 38.31, 41.27, 40.89, 38.71, 39.48, 38.26, 38.73, 38.24, 38.85, 44.42, 38.59, 38.75, 38.87, 38.24] -706.355 -35.317750000000004 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 23744, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 124997, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.221967697143555, 'TIME_S_1KI': 0.43050739964384915, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.2418496704101, 'W': 64.48, 'J_1KI': 27.933029383019296, 'W_1KI': 2.715633423180593, 'W_D': 29.16225, 'J_D': 299.96316114377976, 'W_D_1KI': 1.2281944912398923, 'J_D_1KI': 0.051726520015157186} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_8e-05.json b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_8e-05.json deleted file mode 100644 index d9351a9..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 19652, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199995, "MATRIX_DENSITY": 7.9998e-05, "TIME_S": 10.312057971954346, "TIME_S_1KI": 0.52473325727429, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.4771731948853, "W": 65.12, "J_1KI": 34.27015943389402, "W_1KI": 3.3136576429879914, "W_D": 29.563250000000004, "J_D": 305.74591585463287, "W_D_1KI": 1.5043379808670876, "J_D_1KI": 0.0765488490162369} diff --git a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_8e-05.output b/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_8e-05.output deleted file mode 100644 index e83a549..0000000 --- a/pytorch/output_1core_after_test/epyc_7313p_10_10_10_50000_8e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 7.99972e-05, "TIME_S": 0.5342950820922852} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 199982, 199988, - 199993]), - col_indices=tensor([45309, 49059, 31228, ..., 7860, 9137, 42982]), - values=tensor([-0.6417, -1.5327, -0.2637, ..., -0.3270, 0.2349, - 0.3681]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.0493, 0.7712, 0.5803, ..., 0.1824, 0.9798, 0.2503]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 0.5342950820922852 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19652', '-ss', '50000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199995, "MATRIX_DENSITY": 7.9998e-05, "TIME_S": 10.312057971954346} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 199987, 199992, - 199995]), - col_indices=tensor([ 5824, 15140, 35865, ..., 6476, 31010, 34332]), - values=tensor([-2.5680, 0.6813, 1.0320, ..., -0.0262, -1.7934, - 1.1305]), size=(50000, 50000), nnz=199995, - layout=torch.sparse_csr) -tensor([0.3409, 0.2199, 0.6691, ..., 0.0481, 0.1445, 0.8220]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199995 -Density: 7.9998e-05 -Time: 10.312057971954346 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 199987, 199992, - 199995]), - col_indices=tensor([ 5824, 15140, 35865, ..., 6476, 31010, 34332]), - values=tensor([-2.5680, 0.6813, 1.0320, ..., -0.0262, -1.7934, - 1.1305]), size=(50000, 50000), nnz=199995, - layout=torch.sparse_csr) -tensor([0.3409, 0.2199, 0.6691, ..., 0.0481, 0.1445, 0.8220]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199995 -Density: 7.9998e-05 -Time: 10.312057971954346 seconds - -[39.12, 39.14, 38.75, 38.92, 38.74, 38.88, 38.46, 39.32, 38.47, 38.27] -[65.12] -10.34209418296814 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 19652, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 199995, 'MATRIX_DENSITY': 7.9998e-05, 'TIME_S': 10.312057971954346, 'TIME_S_1KI': 0.52473325727429, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.4771731948853, 'W': 65.12} -[39.12, 39.14, 38.75, 38.92, 38.74, 38.88, 38.46, 39.32, 38.47, 38.27, 39.44, 38.59, 38.34, 38.31, 38.65, 38.23, 39.22, 52.79, 38.81, 38.2] -711.135 -35.55675 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 19652, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 199995, 'MATRIX_DENSITY': 7.9998e-05, 'TIME_S': 10.312057971954346, 'TIME_S_1KI': 0.52473325727429, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.4771731948853, 'W': 65.12, 'J_1KI': 34.27015943389402, 'W_1KI': 3.3136576429879914, 'W_D': 29.563250000000004, 'J_D': 305.74591585463287, 'W_D_1KI': 1.5043379808670876, 'J_D_1KI': 0.0765488490162369} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_0.0001.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_0.0001.json deleted file mode 100644 index 2c312c5..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3877, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999964, "MATRIX_DENSITY": 9.99964e-05, "TIME_S": 10.451881647109985, "TIME_S_1KI": 2.69586836396956, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 560.0268898200989, "W": 52.94, "J_1KI": 144.44851426879, "W_1KI": 13.654887799845241, "W_D": 35.50125, "J_D": 375.550710657835, "W_D_1KI": 9.15688676811968, "J_D_1KI": 2.3618485344647095} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_0.0001.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_0.0001.output deleted file mode 100644 index 4eda566..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_0.0001.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999956, "MATRIX_DENSITY": 9.99956e-05, "TIME_S": 2.708162546157837} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 16, ..., 999934, 999943, - 999956]), - col_indices=tensor([13015, 16921, 17464, ..., 91986, 97484, 97707]), - values=tensor([-0.1219, -1.6514, 1.9178, ..., 2.0686, -0.4564, - 0.1849]), size=(100000, 100000), nnz=999956, - layout=torch.sparse_csr) -tensor([0.7331, 0.5213, 0.8542, ..., 0.6049, 0.8423, 0.0118]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999956 -Density: 9.99956e-05 -Time: 2.708162546157837 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3877', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999964, "MATRIX_DENSITY": 9.99964e-05, "TIME_S": 10.451881647109985} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 15, ..., 999950, 999958, - 999964]), - col_indices=tensor([ 2681, 9613, 15551, ..., 90557, 92681, 96918]), - values=tensor([-1.1994, -0.0343, 0.2381, ..., -0.0856, -0.2320, - -0.4300]), size=(100000, 100000), nnz=999964, - layout=torch.sparse_csr) -tensor([0.9548, 0.8008, 0.2023, ..., 0.2976, 0.8294, 0.8956]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999964 -Density: 9.99964e-05 -Time: 10.451881647109985 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 15, ..., 999950, 999958, - 999964]), - col_indices=tensor([ 2681, 9613, 15551, ..., 90557, 92681, 96918]), - values=tensor([-1.1994, -0.0343, 0.2381, ..., -0.0856, -0.2320, - -0.4300]), size=(100000, 100000), nnz=999964, - layout=torch.sparse_csr) -tensor([0.9548, 0.8008, 0.2023, ..., 0.2976, 0.8294, 0.8956]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999964 -Density: 9.99964e-05 -Time: 10.451881647109985 seconds - -[19.05, 19.41, 19.13, 18.7, 18.72, 18.81, 18.72, 19.06, 22.88, 19.04] -[52.94] -10.578520774841309 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3877, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 999964, 'MATRIX_DENSITY': 9.99964e-05, 'TIME_S': 10.451881647109985, 'TIME_S_1KI': 2.69586836396956, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 560.0268898200989, 'W': 52.94} -[19.05, 19.41, 19.13, 18.7, 18.72, 18.81, 18.72, 19.06, 22.88, 19.04, 18.95, 18.85, 18.92, 18.48, 23.53, 19.32, 18.84, 18.97, 18.57, 18.69] -348.77500000000003 -17.438750000000002 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3877, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 999964, 'MATRIX_DENSITY': 9.99964e-05, 'TIME_S': 10.451881647109985, 'TIME_S_1KI': 2.69586836396956, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 560.0268898200989, 'W': 52.94, 'J_1KI': 144.44851426879, 'W_1KI': 13.654887799845241, 'W_D': 35.50125, 'J_D': 375.550710657835, 'W_D_1KI': 9.15688676811968, 'J_D_1KI': 2.3618485344647095} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_1e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_1e-05.json deleted file mode 100644 index 6350cc0..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 10372, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.426263093948364, "TIME_S_1KI": 1.005231690507941, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 548.9229322242737, "W": 52.86000000000001, "J_1KI": 52.92353762285709, "W_1KI": 5.096413420748169, "W_D": 35.85775000000001, "J_D": 372.36362604928024, "W_D_1KI": 3.4571683378326274, "J_D_1KI": 0.3333174255527022} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_1e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_1e-05.output deleted file mode 100644 index 95971ab..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_1e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 
1.0122957229614258} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 99998, 99999, - 100000]), - col_indices=tensor([85502, 81791, 3101, ..., 34598, 78026, 48521]), - values=tensor([ 0.2099, -1.8204, -0.1879, ..., 0.7437, -0.4536, - -0.1214]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.3625, 0.6275, 0.0060, ..., 0.7338, 0.0056, 0.0046]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 1.0122957229614258 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '10372', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.426263093948364} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99999, 99999, - 100000]), - col_indices=tensor([43958, 55839, 71974, ..., 137, 34540, 99185]), - values=tensor([-0.5703, 1.7597, -0.7264, ..., -0.5138, -1.2344, - 1.5616]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.6166, 0.5956, 0.5547, ..., 0.0751, 0.2868, 0.7525]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 10.426263093948364 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99999, 99999, - 100000]), - col_indices=tensor([43958, 55839, 71974, ..., 137, 34540, 99185]), - values=tensor([-0.5703, 1.7597, -0.7264, ..., -0.5138, -1.2344, - 1.5616]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.6166, 0.5956, 0.5547, ..., 0.0751, 0.2868, 0.7525]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 10.426263093948364 seconds - -[19.1, 18.62, 19.37, 18.75, 18.76, 18.74, 19.04, 18.74, 19.6, 18.78] -[52.86] -10.384467124938965 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 10372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.426263093948364, 'TIME_S_1KI': 1.005231690507941, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 548.9229322242737, 'W': 52.86000000000001} -[19.1, 18.62, 19.37, 18.75, 18.76, 18.74, 19.04, 18.74, 19.6, 18.78, 19.23, 18.74, 18.82, 18.67, 19.06, 18.77, 18.9, 18.58, 18.97, 18.72] -340.045 -17.00225 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 10372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.426263093948364, 'TIME_S_1KI': 1.005231690507941, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 548.9229322242737, 'W': 52.86000000000001, 'J_1KI': 52.92353762285709, 'W_1KI': 5.096413420748169, 'W_D': 35.85775000000001, 'J_D': 372.36362604928024, 'W_D_1KI': 3.4571683378326274, 'J_D_1KI': 0.3333174255527022} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_2e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_2e-05.json deleted file mode 100644 index 0359990..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 8450, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 200000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.364590644836426, "TIME_S_1KI": 1.226578774536855, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 550.301865234375, "W": 52.879999999999995, "J_1KI": 65.12448109282543, "W_1KI": 6.257988165680473, "W_D": 35.89525, "J_D": 373.54809054565425, "W_D_1KI": 4.247958579881656, "J_D_1KI": 0.5027169917019711} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_2e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_2e-05.output deleted file mode 100644 index cbb5d39..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_2e-05.output +++ /dev/null @@ -1,67 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199996, "MATRIX_DENSITY": 1.99996e-05, "TIME_S": 
1.2425594329833984} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 5, ..., 199991, 199993, - 199996]), - col_indices=tensor([21327, 22326, 24024, ..., 10430, 67006, 75980]), - values=tensor([0.0022, 0.0036, 0.5599, ..., 0.0166, 0.5758, 0.5521]), - size=(100000, 100000), nnz=199996, layout=torch.sparse_csr) -tensor([0.1273, 0.6151, 0.3012, ..., 0.6835, 0.8442, 0.5210]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199996 -Density: 1.99996e-05 -Time: 1.2425594329833984 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8450', '-ss', '100000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 200000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.364590644836426} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 8, ..., 199998, 199999, - 200000]), - col_indices=tensor([34304, 39877, 83262, ..., 97629, 72074, 44635]), - values=tensor([-0.9985, -0.7898, -0.8616, ..., 0.7656, -1.7207, - -1.0837]), size=(100000, 100000), nnz=200000, - layout=torch.sparse_csr) -tensor([0.6657, 0.5567, 0.8768, ..., 0.9522, 0.0477, 0.7835]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 200000 -Density: 2e-05 -Time: 10.364590644836426 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 8, ..., 199998, 199999, - 200000]), - col_indices=tensor([34304, 39877, 83262, ..., 97629, 72074, 44635]), - values=tensor([-0.9985, -0.7898, -0.8616, ..., 0.7656, -1.7207, - -1.0837]), size=(100000, 100000), nnz=200000, - layout=torch.sparse_csr) -tensor([0.6657, 0.5567, 0.8768, ..., 0.9522, 0.0477, 0.7835]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 200000 -Density: 2e-05 -Time: 10.364590644836426 seconds - -[19.52, 18.52, 18.83, 18.88, 18.67, 18.91, 19.09, 18.76, 18.91, 18.54] -[52.88] -10.4066162109375 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8450, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 200000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.364590644836426, 'TIME_S_1KI': 1.226578774536855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 550.301865234375, 'W': 52.879999999999995} -[19.52, 18.52, 18.83, 18.88, 18.67, 18.91, 19.09, 18.76, 18.91, 18.54, 19.51, 18.66, 18.77, 18.69, 19.31, 18.67, 18.95, 18.76, 19.17, 18.72] -339.695 -16.98475 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8450, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 200000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.364590644836426, 'TIME_S_1KI': 1.226578774536855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 550.301865234375, 'W': 52.879999999999995, 'J_1KI': 65.12448109282543, 'W_1KI': 6.257988165680473, 'W_D': 35.89525, 'J_D': 373.54809054565425, 'W_D_1KI': 4.247958579881656, 'J_D_1KI': 0.5027169917019711} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_5e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_5e-05.json deleted file mode 100644 index 5c236a8..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 5943, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499984, "MATRIX_DENSITY": 4.99984e-05, "TIME_S": 10.379753828048706, "TIME_S_1KI": 1.7465512078157002, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 556.8061262321472, "W": 53.480000000000004, "J_1KI": 93.69108635910268, "W_1KI": 8.99882214369847, "W_D": 36.261, "J_D": 377.5307954993248, "W_D_1KI": 6.101463907117617, "J_D_1KI": 1.0266639587948203} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_5e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_5e-05.output deleted file mode 100644 index 393ed6d..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_5e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499987, "MATRIX_DENSITY": 4.99987e-05, "TIME_S": 
1.7667756080627441} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 499982, 499983, - 499987]), - col_indices=tensor([14204, 16359, 24428, ..., 21942, 30461, 63005]), - values=tensor([-0.5086, -0.0823, 0.4466, ..., -0.7509, -0.5805, - 1.4534]), size=(100000, 100000), nnz=499987, - layout=torch.sparse_csr) -tensor([0.9985, 0.8548, 0.3137, ..., 0.5584, 0.7815, 0.8996]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499987 -Density: 4.99987e-05 -Time: 1.7667756080627441 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5943', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499984, "MATRIX_DENSITY": 4.99984e-05, "TIME_S": 10.379753828048706} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 499977, 499979, - 499984]), - col_indices=tensor([23727, 54136, 3814, ..., 56227, 59419, 82737]), - values=tensor([ 0.1418, 0.9397, 0.4919, ..., -0.3857, 0.4976, - 0.4719]), size=(100000, 100000), nnz=499984, - layout=torch.sparse_csr) -tensor([0.8416, 0.3304, 0.9926, ..., 0.3044, 0.9208, 0.0883]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499984 -Density: 4.99984e-05 -Time: 10.379753828048706 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 499977, 499979, - 499984]), - col_indices=tensor([23727, 54136, 3814, ..., 56227, 59419, 82737]), - values=tensor([ 0.1418, 0.9397, 0.4919, ..., -0.3857, 0.4976, - 0.4719]), size=(100000, 100000), nnz=499984, - layout=torch.sparse_csr) -tensor([0.8416, 0.3304, 0.9926, ..., 0.3044, 0.9208, 0.0883]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499984 -Density: 4.99984e-05 -Time: 10.379753828048706 seconds - -[19.19, 18.93, 18.57, 19.18, 18.75, 18.54, 18.86, 18.7, 19.17, 18.83] -[53.48] -10.41148328781128 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5943, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 499984, 'MATRIX_DENSITY': 4.99984e-05, 'TIME_S': 10.379753828048706, 'TIME_S_1KI': 1.7465512078157002, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 556.8061262321472, 'W': 53.480000000000004} -[19.19, 18.93, 18.57, 19.18, 18.75, 18.54, 18.86, 18.7, 19.17, 18.83, 19.42, 18.79, 18.85, 18.72, 18.69, 18.8, 22.92, 19.51, 18.98, 19.4] -344.38 -17.219 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5943, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 499984, 'MATRIX_DENSITY': 4.99984e-05, 'TIME_S': 10.379753828048706, 'TIME_S_1KI': 1.7465512078157002, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 556.8061262321472, 'W': 53.480000000000004, 'J_1KI': 93.69108635910268, 'W_1KI': 8.99882214369847, 'W_D': 36.261, 'J_D': 377.5307954993248, 'W_D_1KI': 6.101463907117617, 'J_D_1KI': 1.0266639587948203} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_8e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_8e-05.json deleted file mode 100644 index a33dac1..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4804, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799966, "MATRIX_DENSITY": 7.99966e-05, "TIME_S": 10.40351128578186, "TIME_S_1KI": 2.165593523268497, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 566.4653639435768, "W": 53.989999999999995, "J_1KI": 117.91535469266793, "W_1KI": 11.238551207327227, "W_D": 37.04725, "J_D": 388.7013142129779, "W_D_1KI": 7.7117506244796, "J_D_1KI": 1.6052769826144047} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_8e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_8e-05.output deleted file mode 100644 index 85520ab..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_100000_8e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799970, "MATRIX_DENSITY": 7.9997e-05, "TIME_S": 
2.1852731704711914} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 15, ..., 799956, 799966, - 799970]), - col_indices=tensor([15426, 20941, 27435, ..., 49005, 95676, 98444]), - values=tensor([-0.0220, 1.2945, -0.9038, ..., 0.6403, -0.1985, - -1.2325]), size=(100000, 100000), nnz=799970, - layout=torch.sparse_csr) -tensor([0.7301, 0.0780, 0.5115, ..., 0.1634, 0.3815, 0.8553]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799970 -Density: 7.9997e-05 -Time: 2.1852731704711914 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4804', '-ss', '100000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799966, "MATRIX_DENSITY": 7.99966e-05, "TIME_S": 10.40351128578186} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 12, ..., 799953, 799960, - 799966]), - col_indices=tensor([21142, 35701, 52722, ..., 47826, 80565, 89939]), - values=tensor([ 0.8608, 1.3187, 0.1580, ..., -1.9871, -0.1529, - -1.4031]), size=(100000, 100000), nnz=799966, - layout=torch.sparse_csr) -tensor([0.2812, 0.5718, 0.0227, ..., 0.9716, 0.0754, 0.7397]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799966 -Density: 7.99966e-05 -Time: 10.40351128578186 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 12, ..., 799953, 799960, - 799966]), - col_indices=tensor([21142, 35701, 52722, ..., 47826, 80565, 89939]), - values=tensor([ 0.8608, 1.3187, 0.1580, ..., -1.9871, -0.1529, - -1.4031]), size=(100000, 100000), nnz=799966, - layout=torch.sparse_csr) -tensor([0.2812, 0.5718, 0.0227, ..., 0.9716, 0.0754, 0.7397]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799966 -Density: 7.99966e-05 -Time: 10.40351128578186 seconds - -[19.24, 18.81, 18.63, 18.66, 18.82, 19.06, 18.76, 18.76, 18.94, 18.69] -[53.99] -10.492042303085327 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4804, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 799966, 'MATRIX_DENSITY': 7.99966e-05, 'TIME_S': 10.40351128578186, 'TIME_S_1KI': 2.165593523268497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 566.4653639435768, 'W': 53.989999999999995} -[19.24, 18.81, 18.63, 18.66, 18.82, 19.06, 18.76, 18.76, 18.94, 18.69, 19.29, 18.74, 18.68, 19.12, 18.94, 18.63, 18.89, 18.73, 18.79, 18.57] -338.85499999999996 -16.942749999999997 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4804, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 799966, 'MATRIX_DENSITY': 7.99966e-05, 'TIME_S': 10.40351128578186, 'TIME_S_1KI': 2.165593523268497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 566.4653639435768, 'W': 53.989999999999995, 'J_1KI': 117.91535469266793, 'W_1KI': 11.238551207327227, 'W_D': 37.04725, 'J_D': 388.7013142129779, 'W_D_1KI': 7.7117506244796, 'J_D_1KI': 1.6052769826144047} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_0.0001.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_0.0001.json deleted file mode 100644 index a4adf0e..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 124505, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 10.563384056091309, "TIME_S_1KI": 0.08484305093041491, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 555.5393806743622, "W": 52.26, "J_1KI": 4.46198450403086, "W_1KI": 0.41974217902895467, "W_D": 35.2205, "J_D": 374.40441555762294, "W_D_1KI": 0.28288422151720816, "J_D_1KI": 0.0022720711739866524} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_0.0001.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_0.0001.output deleted file mode 100644 index f97faa7..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_0.0001.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, 
"TIME_S": 0.09986138343811035} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 9997, 9999, 10000]), - col_indices=tensor([4098, 8764, 3624, ..., 4836, 6869, 2271]), - values=tensor([-1.0278, -1.1902, 0.0710, ..., 0.3414, -0.7104, - 1.2736]), size=(10000, 10000), nnz=10000, - layout=torch.sparse_csr) -tensor([0.8623, 0.9127, 0.1334, ..., 0.6167, 0.1364, 0.4617]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 10000 -Density: 0.0001 -Time: 0.09986138343811035 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '105145', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 8.867227554321289} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 9998, 9999, 9999]), - col_indices=tensor([4709, 1528, 5298, ..., 1558, 5239, 3828]), - values=tensor([-0.4053, -0.0790, -1.1659, ..., -0.1552, 0.3676, - 0.9244]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.9932, 0.9332, 0.7893, ..., 0.6335, 0.0975, 0.8261]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 8.867227554321289 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '124505', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 10.563384056091309} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 9998, 9999, 9999]), - col_indices=tensor([1398, 6553, 7659, ..., 5210, 2382, 8067]), - values=tensor([-0.1143, -0.0775, -0.1679, ..., 0.7553, 0.6473, - -2.8190]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.6740, 0.5040, 0.8526, ..., 0.4236, 0.6108, 0.7559]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 10.563384056091309 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 9998, 9999, 9999]), - col_indices=tensor([1398, 6553, 7659, ..., 5210, 2382, 8067]), - values=tensor([-0.1143, -0.0775, -0.1679, ..., 0.7553, 0.6473, - -2.8190]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.6740, 0.5040, 0.8526, ..., 0.4236, 0.6108, 0.7559]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 10.563384056091309 seconds - -[19.16, 18.88, 18.89, 18.61, 18.84, 18.92, 18.74, 18.84, 18.99, 19.03] -[52.26] -10.630298137664795 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 124505, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 9999, 'MATRIX_DENSITY': 9.999e-05, 'TIME_S': 10.563384056091309, 'TIME_S_1KI': 0.08484305093041491, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.5393806743622, 'W': 52.26} -[19.16, 18.88, 18.89, 18.61, 18.84, 18.92, 18.74, 18.84, 18.99, 19.03, 19.19, 18.59, 18.63, 19.2, 19.16, 19.66, 18.93, 19.02, 18.92, 18.56] -340.78999999999996 -17.039499999999997 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 124505, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 9999, 'MATRIX_DENSITY': 9.999e-05, 'TIME_S': 10.563384056091309, 'TIME_S_1KI': 0.08484305093041491, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.5393806743622, 'W': 52.26, 'J_1KI': 4.46198450403086, 'W_1KI': 0.41974217902895467, 'W_D': 35.2205, 'J_D': 374.40441555762294, 'W_D_1KI': 0.28288422151720816, 'J_D_1KI': 0.0022720711739866524} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_1e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_1e-05.json deleted file mode 100644 index 096eaa1..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 351951, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.568303346633911, "TIME_S_1KI": 0.030027769054879545, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 566.7232446336747, "W": 53.27, "J_1KI": 1.6102333695135818, "W_1KI": 0.15135629675721907, "W_D": 24.33175, "J_D": 258.8580497018099, "W_D_1KI": 0.06913391352773539, 
"J_D_1KI": 0.00019643050745056948} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_1e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_1e-05.output deleted file mode 100644 index 9ec2f7d..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_1e-05.output +++ /dev/null @@ -1,1521 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.045130252838134766} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([8943, 8857, 1800, 4476, 3141, 9329, 3780, 4010, 5616, - 9852, 1221, 1606, 2023, 363, 4138, 4042, 4268, 704, - 148, 7456, 4704, 8760, 141, 4131, 2806, 2711, 50, - 5731, 6653, 5900, 8209, 4729, 9220, 3406, 7445, 7817, - 4260, 2813, 1728, 8257, 4452, 884, 8591, 6517, 9823, - 1689, 7566, 944, 9115, 4514, 2862, 3702, 5122, 6808, - 3124, 9540, 7900, 4403, 5505, 6763, 4694, 887, 4374, - 7353, 4680, 7703, 5934, 9713, 6094, 3630, 479, 4964, - 956, 1841, 4607, 878, 8899, 7014, 7013, 3554, 5585, - 7147, 856, 2264, 3209, 1017, 3938, 8408, 7109, 1863, - 5752, 2038, 8619, 808, 9145, 5620, 4855, 3826, 6803, - 2249, 4498, 1936, 7705, 4485, 7389, 1969, 8505, 7346, - 8870, 2832, 7666, 3764, 1736, 5038, 4326, 4919, 4162, - 9424, 2075, 2989, 8547, 2192, 8638, 7405, 7072, 8685, - 8412, 2070, 7512, 8105, 5216, 6508, 9624, 6356, 4799, - 1440, 9278, 6343, 7424, 6140, 2430, 1815, 5600, 4776, - 9764, 6276, 2184, 1386, 5274, 7060, 8245, 8816, 2882, - 559, 1899, 2200, 3377, 2229, 7585, 1557, 1687, 6703, - 8690, 2043, 9320, 9612, 4480, 8821, 3622, 2906, 4438, - 7405, 3239, 4955, 2254, 737, 7045, 6176, 4808, 3135, - 4323, 7770, 9578, 8166, 820, 1307, 5731, 5018, 9871, - 7565, 5304, 7674, 3973, 568, 1999, 6181, 4594, 6632, - 9841, 3030, 1970, 460, 489, 3267, 6488, 2502, 1788, - 2332, 3274, 7021, 2340, 7976, 771, 1650, 7235, 8508, - 4344, 8426, 6104, 6388, 2615, 4549, 7375, 9756, 2631, - 3755, 4610, 7841, 6136, 2848, 8981, 3090, 3577, 2422, - 2911, 6314, 3214, 8388, 4536, 3166, 7675, 3974, 8229, - 1460, 1952, 9233, 9470, 7753, 2337, 6593, 7020, 302, - 9124, 8492, 8595, 6331, 1042, 8081, 2745, 489, 567, - 5861, 8292, 9896, 1181, 8098, 7696, 6658, 753, 3607, - 4449, 7622, 885, 3160, 5877, 1818, 8627, 7874, 1279, - 1387, 7428, 7463, 2244, 8816, 9236, 9288, 1635, 8175, - 5891, 389, 3936, 9510, 2254, 6648, 9804, 5805, 1503, - 1143, 1492, 3505, 4120, 137, 1565, 576, 4298, 9548, - 5756, 2247, 5375, 6936, 9135, 5626, 640, 8475, 6150, - 1758, 3637, 9154, 9749, 1785, 1251, 4037, 8724, 3533, - 6482, 6390, 3771, 4415, 5307, 1031, 2950, 1745, 6162, - 2012, 4665, 5389, 6960, 1689, 986, 344, 2186, 5920, - 7631, 1677, 7693, 3764, 8923, 4504, 5192, 175, 2310, - 780, 8541, 3917, 2608, 4089, 6953, 2847, 4642, 481, - 4468, 9026, 8736, 4768, 4876, 5059, 3964, 8040, 7107, - 
8207, 7809, 3853, 6517, 2292, 890, 7452, 3596, 9503, - 9797, 7688, 9535, 4253, 9459, 439, 34, 9531, 2152, - 9463, 2216, 6456, 6411, 949, 6309, 7448, 475, 4278, - 7029, 9098, 68, 2762, 9845, 1785, 407, 2242, 259, - 4834, 5283, 8407, 4848, 8917, 7321, 8415, 9952, 5801, - 9825, 265, 4321, 8472, 2023, 3154, 4067, 7433, 7774, - 9984, 3615, 7493, 65, 1354, 9751, 569, 1755, 3419, - 9234, 6498, 1258, 7149, 1818, 1318, 7046, 3007, 3940, - 6721, 3991, 6094, 1037, 6013, 1042, 6782, 5961, 9628, - 5279, 3499, 999, 9818, 78, 3521, 9524, 5420, 192, - 4181, 6393, 4829, 2098, 3938, 4507, 9941, 7812, 9429, - 1485, 7292, 7301, 1104, 8955, 1493, 9345, 5180, 465, - 4843, 124, 6950, 1582, 693, 5270, 8649, 1576, 3280, - 974, 6124, 7207, 8117, 6137, 6256, 3050, 6230, 9791, - 9212, 4069, 4942, 1835, 2716, 7156, 6871, 842, 9294, - 3402, 5019, 841, 1763, 201, 3799, 2528, 7999, 1368, - 5649, 5686, 9472, 6201, 6645, 8364, 7393, 3503, 3879, - 8008, 3834, 3502, 6441, 7423, 2975, 4963, 6968, 5866, - 9192, 3756, 4123, 7472, 3759, 1193, 9745, 5431, 4399, - 9797, 3677, 3138, 3759, 8667, 7115, 746, 5160, 1726, - 6256, 3808, 8428, 3540, 3594, 256, 4119, 7604, 7310, - 219, 5164, 7410, 8433, 9322, 5311, 6758, 3800, 6170, - 3363, 3660, 9904, 1742, 1241, 7393, 4884, 9188, 9142, - 7053, 8685, 4965, 3974, 4298, 3299, 4625, 9013, 9655, - 2884, 3669, 5877, 3520, 5096, 1908, 3731, 4660, 3757, - 8915, 5832, 3680, 3091, 7880, 1439, 3868, 1915, 9407, - 6088, 2414, 2748, 7302, 301, 6149, 6147, 3543, 9918, - 8108, 2275, 4443, 8507, 302, 9069, 6026, 5990, 8106, - 6264, 5915, 2496, 7899, 7373, 2759, 8725, 2230, 9528, - 6229, 3646, 9085, 1209, 7087, 7309, 4669, 4870, 9673, - 2507, 2805, 6163, 1808, 6473, 5026, 6083, 7828, 112, - 6734, 4865, 6556, 8738, 84, 96, 8709, 2125, 3230, - 9477, 2647, 6788, 9778, 4742, 5531, 4841, 3483, 7198, - 1517, 4466, 6142, 5444, 8822, 9035, 9766, 3681, 5724, - 162, 5785, 4791, 3028, 4767, 2580, 7731, 7982, 3897, - 5774, 4711, 4838, 998, 9140, 1145, 6371, 3563, 27, - 9282, 8980, 7352, 8533, 2867, 7806, 6323, 5937, 6249, - 267, 9190, 2685, 3459, 156, 9682, 6546, 7368, 5338, - 3936, 19, 3949, 4702, 4937, 1708, 3354, 7281, 5865, - 3295, 4950, 6827, 3077, 899, 7085, 2996, 3767, 5864, - 4765, 6089, 4197, 7723, 3510, 3741, 5351, 1495, 4426, - 2620, 1782, 2606, 5040, 4128, 2608, 651, 8552, 3071, - 2139, 6832, 3583, 2758, 3889, 795, 4008, 800, 8099, - 9993, 3191, 8931, 9182, 4894, 1695, 102, 8973, 9684, - 559, 6856, 1388, 9331, 3363, 7476, 5415, 6381, 6051, - 4074, 4949, 3245, 5772, 7624, 9880, 9667, 5258, 1033, - 8326, 8221, 3577, 4539, 3152, 6737, 4810, 2146, 7076, - 3878, 4327, 868, 3981, 4384, 9005, 4690, 7511, 1657, - 3390, 5003, 6288, 3868, 2980, 2342, 8520, 7633, 9468, - 6095, 8010, 5671, 986, 6975, 8687, 336, 8522, 6928, - 6441, 4544, 8460, 774, 663, 7372, 1676, 9380, 5860, - 8097, 6605, 1050, 4653, 2252, 4876, 8284, 9823, 9259, - 2843, 1633, 155, 1667, 9874, 8854, 5922, 5188, 854, - 8943, 5486, 1177, 4904, 4156, 4456, 108, 6963, 1852, - 4251, 7843, 4316, 6513, 3493, 3135, 5268, 3625, 2195, - 9239, 4990, 804, 3240, 235, 725, 3268, 2255, 8864, - 8476, 1829, 5986, 945, 3990, 9122, 2534, 6503, 801, - 4546, 9606, 4000, 423, 1736, 9985, 1148, 2335, 7831, - 6418, 5795, 4772, 3085, 4567, 9424, 9199, 1602, 3787, - 2401, 134, 8406, 2295, 669, 5302, 4685, 2831, 9206, - 5953, 8642, 6783, 4364, 5623, 5922, 4400, 182, 6780, - 3164, 7962, 7425, 7450, 2210, 2644, 4595, 9000, 5484, - 8275, 6908, 1260, 7687, 7011, 850, 3542, 6494, 6570, - 9252, 2517, 9855, 4639, 8836, 4166, 7444, 2996, 5443, - 6683, 8792, 7565, 6568, 7854, 5982, 
9574, 447, 4447, - 1152, 6553, 8135, 8350, 3279, 6520, 6126, 9016, 4545, - 9707, 549, 9260, 4295, 1739, 5248, 8488, 815, 4625, - 1538, 6205, 7404, 5094, 8968, 3581, 701, 5037, 9983, - 1509, 9664, 7190, 3024, 6043, 4996, 1289, 7652, 3860, - 7698, 2124, 4131, 2301, 9077, 9575, 7704, 7820, 6871, - 8104]), - values=tensor([-1.9760e-01, 6.4096e-01, 1.3643e+00, 2.3439e+00, - -2.8164e-01, -1.9023e+00, 2.2650e-01, 5.7673e-01, - -1.2980e-01, 6.4327e-01, -1.3703e+00, 1.8138e+00, - 1.4550e+00, -1.2559e+00, 6.4061e-01, -6.6084e-01, - 9.2728e-01, -3.8635e-01, -8.3858e-01, -1.1401e+00, - 8.5172e-01, -8.3878e-01, 6.3389e-01, -9.2436e-02, - -4.9040e-01, 1.0345e+00, -5.7681e-01, 5.3447e-01, - 1.2205e+00, 7.0777e-01, -5.3465e-01, 2.5229e-01, - -1.3485e-01, -1.5071e+00, 1.0322e+00, -3.5338e+00, - -4.9578e-01, -8.7236e-01, 2.3736e+00, 8.5941e-01, - 1.0080e+00, -4.4911e-01, -1.2279e-01, -8.9887e-01, - -1.0519e-01, 2.7657e+00, 2.1516e+00, 2.5527e-01, - 2.1098e-01, 1.6028e-01, 7.2117e-01, 2.5399e-01, - -6.1631e-01, -1.1625e+00, 3.7053e-01, 9.9026e-01, - -1.5712e+00, -7.8123e-01, 9.7665e-01, -4.3398e-01, - -3.8215e-01, 1.7915e+00, 7.2419e-01, -1.8492e-01, - 1.8231e+00, 2.5177e-01, -2.4143e-01, 1.0597e+00, - 2.9717e-01, 1.8160e+00, 1.6230e+00, 1.4485e+00, - 1.4020e-01, 2.5533e-01, -1.8627e-02, 4.8474e-01, - 1.0112e+00, 1.8916e-01, 8.2067e-01, -1.7579e+00, - -5.6177e-01, 1.0869e-01, 9.5218e-01, -8.4882e-01, - 3.8933e-01, -2.8384e-01, -8.5247e-01, 1.7983e+00, - 3.4672e-01, 1.4093e+00, -1.0102e-01, 9.1282e-01, - 1.4482e-01, 1.8045e-01, -5.8953e-02, 1.6210e+00, - -6.1824e-02, 5.6159e-01, -2.3113e-01, 5.9863e-01, - -5.7066e-01, 1.3256e+00, -4.5417e-01, 1.3636e-01, - 5.3985e-01, -1.4110e+00, 2.3628e+00, 7.4711e-01, - 3.9322e-01, -1.3513e+00, -8.3512e-01, 5.3044e-01, - 1.4441e+00, -6.4950e-01, 1.2576e+00, 8.9427e-01, - -1.5011e+00, -1.5454e+00, -1.2757e+00, -1.5568e+00, - 1.5590e+00, 2.0001e+00, -9.2973e-01, -3.5509e-01, - -4.0735e-02, 7.5340e-01, 7.7519e-01, 2.2388e-02, - 8.3283e-01, 1.5616e+00, 1.7198e+00, 9.7950e-01, - 1.1094e+00, 5.9947e-02, -9.6332e-01, -1.5984e-01, - 1.3780e+00, 1.2496e+00, 1.7239e+00, -8.3722e-01, - -1.2023e+00, 1.6423e+00, 3.1516e-01, -5.9656e-01, - -4.6487e-01, -3.2584e-01, 1.2273e+00, 3.3121e-01, - -4.2621e-01, -1.3773e+00, 1.0395e+00, -4.1636e-01, - 1.4167e-01, 9.9053e-01, -2.7978e-01, -7.3668e-01, - 1.6112e+00, 6.9281e-01, -9.6398e-01, -5.5458e-01, - -7.2825e-01, 5.7242e-02, 8.0884e-01, 3.0926e-01, - 3.3094e-01, 1.0491e+00, -2.2683e+00, 5.6971e-01, - 2.2712e-01, 9.6005e-01, -2.0918e+00, 1.3531e+00, - 1.5450e+00, -3.4949e-01, 9.6152e-01, -1.7996e-02, - 2.1087e+00, -4.7124e-01, -2.0099e+00, -2.3026e+00, - -2.1437e+00, -1.0999e+00, -8.7525e-01, -4.6286e-01, - -7.5740e-01, -7.5276e-02, -4.5197e-01, -1.2038e+00, - -1.1990e+00, 8.2983e-01, 9.1293e-01, 4.4981e-01, - 3.7107e-01, -4.7336e-01, -4.6136e-02, 1.2271e+00, - 7.3290e-01, 8.9206e-01, 8.5726e-01, -9.3704e-01, - 7.9485e-01, -1.3612e-01, 3.7689e-01, 2.0678e-01, - -1.6774e+00, -9.6500e-01, 1.4544e+00, 3.2751e-01, - 1.1872e-01, 1.7015e+00, -7.3316e-01, -3.1850e-01, - 1.7247e-01, -1.6327e-01, 1.9938e+00, -4.2732e-01, - -8.5448e-01, 1.6192e+00, 8.4211e-01, 4.9533e-01, - -2.7449e-01, -1.2750e+00, -2.5297e-02, 1.4379e+00, - -1.8980e+00, -1.2617e+00, 1.1595e+00, 5.9512e-01, - 1.2734e+00, -1.2362e+00, 6.9635e-01, 4.8155e-01, - 1.6620e+00, -2.9371e-01, 1.6065e+00, 4.5978e-01, - -6.0584e-01, -1.5273e-02, 7.4894e-01, 1.1045e+00, - -2.8325e-01, -4.6025e-01, -3.6721e-01, 1.6018e-01, - 3.2961e-01, -8.6346e-01, -2.7529e-01, -9.0048e-01, - 1.2783e-01, 
-1.6834e+00, -1.1307e+00, -7.8918e-01, - 7.1064e-01, 3.5494e-01, -7.4824e-02, -1.0514e+00, - 2.5575e-01, 2.1687e+00, 2.0772e-02, 2.5195e-01, - 2.1104e-01, -1.1604e+00, -6.1347e-01, -1.2151e-01, - -2.7716e+00, 4.0392e-01, -1.6238e-02, 1.6368e+00, - 1.5227e+00, 1.1349e+00, 1.6371e+00, -9.7254e-01, - 1.6971e+00, 1.9878e-01, -1.0561e+00, -4.8666e-01, - 9.1279e-01, -2.0977e-01, -1.8251e-01, -7.1013e-01, - 6.1196e-01, -5.0142e-01, -3.4800e-01, 2.5261e-01, - -3.8520e-02, 7.0301e-01, 1.5072e+00, -9.1373e-01, - -3.3263e-01, 1.0754e+00, 1.0836e+00, -1.0620e+00, - 5.5571e-01, 4.7527e-01, 4.1458e-01, 4.3709e-01, - 1.4032e+00, -7.2058e-01, -1.3565e-02, -1.6793e-01, - 1.3823e+00, -2.3561e-01, 1.2484e-01, -3.3855e-01, - -8.4662e-01, 1.8742e-01, -9.5541e-01, 6.2568e-01, - 1.0020e+00, 1.5835e+00, 5.3985e-01, -1.3309e-01, - 9.1621e-01, -1.6420e-01, -9.1680e-01, -1.1786e+00, - 2.9317e-01, -6.2077e-01, 1.0995e+00, -5.2638e-01, - -2.0707e-01, -1.1171e+00, 8.2191e-01, -2.1130e-01, - -2.0900e+00, -1.0844e+00, 1.0028e+00, 3.0449e-01, - -1.0873e-01, 1.0566e+00, 1.4327e-01, -3.9507e-01, - 5.3810e-01, 1.0913e+00, -3.2749e-01, 1.2102e+00, - 8.7418e-02, -2.4179e+00, 1.3275e-01, -1.6451e-02, - -3.5230e-01, 2.8527e-02, -2.5781e-01, -4.0711e-01, - 1.3159e+00, 3.4631e-02, -1.9383e+00, -2.9809e-01, - -1.1575e+00, -8.1022e-02, -2.3226e+00, 6.1064e-01, - -1.1310e-01, 1.5594e-01, -1.4807e+00, 9.1252e-01, - 3.9788e-01, -1.2681e+00, -2.0172e-02, 6.9961e-01, - 1.9790e+00, -3.3777e-01, -5.8520e-02, 2.9987e-01, - 4.1436e-01, 5.7513e-01, 7.8936e-01, 3.3517e-02, - 2.8771e-01, 6.4523e-01, -1.4805e+00, 1.3149e+00, - 2.6662e-01, 1.6328e+00, -1.8281e-01, -4.5781e-02, - 1.0231e-01, 9.1638e-01, 4.6743e-01, -1.1693e+00, - -2.1104e-01, -2.1476e-02, -8.1140e-01, 6.1757e-03, - -1.5202e+00, -1.2391e+00, -9.7496e-01, 2.8315e-01, - 1.2454e+00, 1.0689e+00, 1.4078e+00, -5.8334e-01, - -1.4823e+00, -3.8828e-01, -1.2230e+00, -4.7556e-01, - -3.0486e-01, -1.2881e-02, -1.1029e+00, 5.6410e-01, - -3.7604e-01, -5.3030e-01, 5.6084e-02, 1.1954e+00, - 1.6224e+00, 7.7618e-01, -1.4713e-01, 1.4128e+00, - -2.3494e-01, -6.3982e-01, -9.9847e-01, 1.9720e-01, - -5.8430e-02, -6.8479e-01, -1.7603e+00, 1.5197e+00, - -9.7919e-01, 1.0324e+00, -1.2903e+00, -2.5108e-01, - 1.6444e+00, -1.0087e+00, 3.0708e-01, 8.8106e-01, - 2.1274e+00, -3.8955e-01, -6.4138e-01, -2.0187e+00, - -5.6976e-01, 1.1965e+00, -8.1490e-01, -2.3041e-01, - 7.3205e-01, -4.8880e-01, 1.6163e+00, -1.6182e-01, - 1.0679e+00, 9.2740e-02, -4.1311e-01, -7.3057e-01, - -6.6746e-01, -7.8728e-01, 4.0541e-01, -1.1579e+00, - -9.3368e-01, -1.8808e-01, -1.7036e+00, 4.8618e-02, - 1.6189e+00, 3.2913e-02, 4.7682e-01, -2.2390e-02, - 8.2106e-02, 1.6162e+00, -1.4618e+00, -1.2356e+00, - -3.3254e-01, -2.9206e-01, -5.1747e-01, -6.1563e-01, - -1.3016e+00, -1.8300e+00, -4.0154e-02, 1.6115e+00, - 9.9130e-01, 7.0968e-01, 2.1449e+00, -8.7691e-01, - 3.2218e-01, 4.8296e-01, -1.2422e+00, -4.6265e-01, - 2.8571e-01, 5.9947e-01, 5.9701e-02, -5.3837e-01, - -9.1317e-01, 6.4379e-01, 8.0148e-01, 1.0196e-01, - 6.0857e-01, 1.7978e+00, 5.3638e-01, -1.1961e+00, - 3.5445e-01, -1.0886e+00, -1.1759e+00, -6.0373e-01, - -1.4530e+00, 7.9976e-02, -1.0131e+00, -1.7685e+00, - -2.6177e-01, -1.4082e+00, 2.8645e-01, -3.8211e-01, - 7.8952e-01, -5.0863e-01, 3.4115e-01, 8.1527e-01, - 1.0624e+00, 3.9113e-01, 1.7423e+00, 1.5253e+00, - 2.8509e-01, -7.2126e-01, 1.7533e+00, 8.8596e-01, - 7.4643e-01, 4.8277e-02, 6.0579e-01, -4.0529e-01, - 5.5835e-01, -3.2465e-01, -4.4891e-01, -6.0124e-01, - -4.4405e-02, 3.8620e-02, -2.0497e+00, -1.4665e+00, - -5.4733e-01, -1.4851e-01, 
-1.4475e-01, 9.0675e-01, - -6.7580e-01, 1.8157e-01, 4.3009e-01, 1.2594e+00, - 4.0838e-01, 7.3406e-01, 1.1625e+00, -1.3254e+00, - -3.5945e-01, 1.3647e-01, 2.4102e+00, -1.1129e+00, - 7.0409e-01, 9.1249e-01, 3.1718e-01, -1.6505e-01, - -1.3492e+00, 7.4113e-01, -1.7061e+00, -1.3822e+00, - -1.0191e+00, -3.1157e-01, 9.4060e-01, 8.3730e-01, - 2.3741e+00, 7.3592e-02, -3.2071e-01, -2.5468e-01, - -1.4361e+00, -2.2662e+00, 6.9334e-01, 4.0650e-01, - -1.0315e+00, 9.4583e-01, 1.5552e+00, -8.4255e-01, - 2.0710e+00, -4.8539e-01, 1.8221e+00, 4.6159e-01, - 2.8312e-01, 1.4807e+00, -6.7305e-01, 1.8391e-01, - 1.8566e+00, -7.5069e-01, 8.3420e-01, -3.7021e-02, - -1.5014e-01, -2.2106e-01, -2.7033e-01, 9.2046e-01, - 6.8317e-02, 9.7127e-01, 1.2836e+00, -1.5273e-01, - 1.8062e+00, 1.3095e+00, -1.1569e+00, 1.3732e-01, - 5.4085e-01, 8.5322e-01, -1.3817e-01, -7.7627e-01, - -1.5645e-01, 1.4149e+00, -2.2321e+00, 9.4387e-02, - -3.9437e-01, 2.8057e-01, -4.8920e-01, -1.0579e+00, - 4.4727e-02, -7.9011e-01, -1.5448e+00, 7.8059e-01, - 2.2551e-01, 2.3462e+00, 3.7282e-03, 9.2293e-01, - -5.1842e-01, -8.5185e-02, -4.1661e-01, -1.7805e-01, - -1.2578e+00, 1.4837e+00, 6.3735e-01, 3.1162e-01, - 8.7764e-01, 3.7087e-02, 1.2741e+00, 8.5389e-01, - 1.1519e+00, 6.9842e-01, 8.1200e-01, 1.8176e+00, - -1.0876e+00, -1.7972e-02, -1.1044e+00, 1.6295e+00, - -7.1473e-01, 1.1297e+00, -5.1901e-02, 1.4703e-01, - 1.1298e+00, -1.1363e+00, -2.8831e-01, 8.3240e-01, - -5.7399e-01, 1.4380e+00, -1.7126e-01, 4.5734e-01, - 1.1870e+00, 1.3241e+00, -1.6352e+00, 8.6067e-03, - -8.3560e-01, 2.7180e+00, -8.5533e-01, -1.0424e+00, - -9.8182e-01, -3.3826e-01, -6.0425e-01, 1.8530e+00, - -3.7288e-01, -7.3301e-01, -4.9151e-01, 1.5527e+00, - -2.0313e-01, 5.4065e-01, -9.3419e-01, 6.3592e-01, - -1.1908e+00, -1.6777e-01, -6.5962e-01, -4.9789e-01, - -1.4845e+00, -1.4626e+00, 6.9084e-01, -6.4463e-01, - -5.4738e-01, 1.0585e+00, 2.2511e-01, 7.5573e-01, - 1.7103e+00, 2.1262e+00, -1.2571e+00, -2.3801e-01, - 1.1417e+00, -3.6661e-01, 1.3808e-01, 5.4799e-03, - -6.7784e-01, 3.7412e-01, -7.3764e-01, 5.0620e-01, - -1.8664e-01, -4.2497e-02, -2.0571e+00, -7.1049e-01, - -6.2065e-01, 1.2764e-02, 3.7436e-01, 8.0082e-01, - 1.2101e+00, -4.7629e-01, -5.6124e-01, 1.8291e-01, - -5.7879e-01, -2.8331e-02, -4.3178e-01, -1.1636e+00, - 1.9769e-01, -1.0897e+00, 8.9953e-01, -5.2554e-01, - 1.2640e-01, 1.2881e-01, -2.6213e-01, 1.2322e+00, - 6.3053e-01, 1.2388e-01, -1.1613e+00, 1.4940e+00, - -5.7215e-01, -1.0776e-01, -8.6174e-01, 1.2595e+00, - 1.0592e-01, 1.2195e+00, 1.0346e+00, -8.0969e-01, - 9.4180e-01, 1.6698e+00, 1.7788e-01, 5.2594e-01, - 1.3726e+00, -1.1793e+00, 1.1066e+00, 1.4411e+00, - -1.3709e-01, 6.8235e-01, 1.0555e+00, -6.7591e-01, - 1.4269e-01, -7.3093e-01, -2.0690e-01, -5.8578e-01, - 1.0957e+00, 2.1122e+00, -8.1912e-01, 1.8230e+00, - 7.1116e-01, 2.3177e+00, 3.0643e-01, -1.1997e+00, - 1.9532e+00, 8.5848e-01, -1.9330e-01, -5.9033e-01, - 8.0870e-01, 9.2275e-01, -8.4001e-01, -5.7338e-01, - -1.6528e-01, -6.8793e-01, 4.0004e-01, -1.0942e+00, - -2.5925e+00, -5.5078e-01, -2.5878e+00, 9.9932e-01, - 1.4759e+00, 8.4004e-01, -8.7380e-01, 6.0633e-01, - 2.6259e+00, -2.1084e+00, 1.6126e+00, 2.4940e+00, - 5.9335e-01, 6.2431e-01, -2.4166e-01, -5.4610e-01, - 1.7907e+00, -1.0236e+00, 1.0485e+00, -1.8231e+00, - 4.0843e-01, -1.9159e+00, -4.5802e-01, -7.9590e-01, - -1.2345e+00, 7.2664e-01, 3.6250e-01, -4.0583e-02, - -1.1002e+00, 6.4379e-01, -3.8783e-01, 6.1304e-01, - 1.2415e+00, 1.9650e-01, -5.3664e-01, 7.6158e-01, - 1.2469e+00, 1.3650e+00, -4.7875e-01, 5.6018e-01, - -3.6852e-01, 7.3243e-02, 2.6126e-01, 1.2987e+00, - 
-3.0739e-01, 1.2487e+00, 1.0874e+00, 6.6263e-01, - 7.3308e-02, -6.5339e-01, 2.6516e+00, 7.7251e-01, - 1.2795e+00, -4.0839e-01, 9.4449e-01, -1.7179e-01, - -1.0801e-01, 4.9394e-02, 1.9292e+00, 4.8038e-01, - -5.2517e-01, 4.4143e-01, 6.7315e-01, 1.1055e+00, - 3.8821e-01, 4.8405e-02, -7.3226e-01, 3.2232e-01, - 2.0891e-01, 5.6748e-01, -1.5152e+00, 1.6077e+00, - 8.6761e-01, -7.0734e-01, -1.8723e-01, -2.0541e-01, - -1.5958e+00, -1.5522e+00, 5.8051e-01, 5.7165e-01, - -1.2166e+00, 1.0582e+00, -1.2930e+00, -8.9756e-02, - -9.1336e-01, 1.3936e-01, 3.2574e-01, -3.0765e-01, - -1.3701e-01, -4.9361e-01, -8.0259e-01, -1.3662e+00, - -6.4827e-01, -7.3622e-01, 8.4916e-01, -6.4938e-01, - 6.1925e-02, -3.2184e-01, 7.0946e-01, -5.6569e-01, - 8.3332e-02, -1.2243e+00, -4.5096e-02, -4.1677e-01, - 1.9349e+00, -1.1662e+00, -9.4504e-01, 5.7083e-01, - 5.0233e-02, 2.7505e-02, -3.4654e-01, -3.9123e-01, - 1.9029e-01, -1.3239e+00, 1.6214e+00, -1.9587e+00, - 4.9583e-01, -4.8207e-02, 2.1844e+00, 8.3668e-01, - 1.2388e+00, 1.3655e+00, -4.5208e-01, 4.0134e-01, - -1.7364e-01, -9.1503e-01, 1.7346e-01, -9.4430e-01, - 8.7302e-01, 8.6873e-01, 1.3281e+00, -8.4983e-01, - -4.5923e-01, 1.0647e+00, 3.7866e-01, -3.7418e-01, - -1.7489e-01, -2.7452e+00, -1.3706e+00, 2.0277e-01, - 1.0572e+00, -2.0664e-01, -3.6881e-01, -1.1923e-01, - -5.5120e-02, 1.3441e-01, -9.3099e-01, -1.0569e+00, - -1.3109e+00, 1.7577e+00, 2.9089e+00, -4.2934e+00, - -5.2852e-01, 1.0002e+00, 1.3676e-01, -2.4166e-01, - 1.4560e+00, -1.4940e+00, 8.1970e-01, -2.2231e+00, - -7.0830e-01, 5.9988e-01, -3.4693e-01, -2.0635e+00, - -1.5485e+00, 9.7066e-01, 3.0016e-02, 9.5624e-01, - 6.3356e-01, 1.7547e+00, 4.5024e-02, 1.1388e-01, - -5.0603e-01, -1.2314e+00, -1.5119e+00, 6.5196e-01, - -7.1418e-01, 5.7526e-01, -9.6615e-01, 2.6053e-01, - 5.9587e-01, -7.9295e-01, 1.0869e+00, -5.8065e-01, - 2.6226e-01, 6.7052e-01, 5.4349e-01, 1.2822e+00, - 2.3056e+00, 2.1974e-02, -7.2454e-01, 1.8346e+00, - -9.4998e-01, -7.2237e-02, 1.0567e+00, -7.3361e-01, - -4.4554e-01, 1.7385e+00, 1.4845e-01, 3.1316e-01, - 9.3846e-01, -1.1606e+00, 1.3682e+00, -2.5747e+00, - -1.1958e+00, -3.2288e-01, -9.0688e-01, 8.0598e-01, - -3.2489e-01, 7.0164e-01, 6.1922e-01, -1.4097e+00, - -7.2650e-02, -3.6262e-01, -2.7288e-01, 3.8641e-01, - -8.5457e-01, -6.7968e-01, -3.0292e+00, 1.3536e+00, - 3.1129e-01, -2.1322e-01, 2.9086e+00, 4.8068e-01, - 2.4557e-01, 1.4283e-01, -1.5053e+00, -7.6502e-01, - 7.3286e-01, 4.3034e-01, 9.5952e-01, 1.1775e+00, - 3.3505e-01, 1.1538e+00, -4.0668e-01, -1.7309e+00, - 1.0316e+00, -1.2439e+00, 3.1265e-02, -8.5406e-01, - 1.1281e+00, -6.6693e-01, 1.3558e+00, -1.6726e+00, - 2.0199e-01, 1.2513e-02, 7.0968e-01, -1.4085e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8076, 0.4470, 0.6640, ..., 0.7535, 0.1126, 0.3506]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 0.045130252838134766 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '232659', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.941063165664673} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), - col_indices=tensor([5190, 1374, 2038, 7364, 5682, 1909, 2516, 6027, 9066, - 3052, 6031, 8065, 6138, 7737, 1599, 1541, 68, 7756, - 4774, 4184, 5614, 6132, 9395, 1964, 2754, 3392, 8387, - 1698, 6606, 6205, 7341, 1998, 2868, 6762, 4834, 2191, - 7815, 4312, 2554, 8744, 7560, 2026, 8805, 2281, 1530, - 372, 6965, 319, 9744, 8349, 1280, 9879, 4868, 7929, - 8060, 7753, 7804, 3456, 5314, 2965, 2301, 7748, 6979, - 9032, 7458, 5365, 7624, 6257, 5501, 4861, 6935, 4915, - 6514, 6859, 4475, 6453, 6819, 9086, 2763, 6350, 88, - 5356, 1269, 1308, 42, 6895, 2415, 7873, 3569, 9659, - 751, 325, 7647, 5256, 8373, 6694, 2551, 4476, 4497, - 2981, 8211, 4778, 8586, 9603, 8771, 4936, 347, 1556, - 7164, 3093, 7904, 6109, 2113, 879, 5418, 2259, 5710, - 6742, 5546, 6882, 3915, 2491, 3566, 4132, 3890, 1295, - 849, 5317, 3708, 7418, 7517, 6274, 6516, 9021, 3478, - 2772, 491, 1253, 8826, 5296, 8119, 119, 5326, 2279, - 4535, 4488, 9034, 1649, 5005, 6684, 7575, 9326, 5592, - 3193, 672, 671, 3577, 5494, 5274, 4091, 2005, 8686, - 5392, 2729, 5314, 5181, 2434, 6283, 8806, 3273, 7260, - 4882, 6392, 4558, 8041, 1705, 4277, 2954, 218, 3606, - 2338, 7030, 4058, 1746, 5661, 9982, 1430, 7055, 4983, - 6029, 9611, 7433, 7790, 7298, 4517, 795, 1559, 7369, - 6619, 393, 4138, 1148, 3202, 5221, 1423, 6340, 4893, - 8128, 9539, 6856, 4546, 6509, 7935, 687, 6492, 4339, - 7014, 1825, 2591, 7400, 4621, 5088, 5473, 7901, 7590, - 5106, 8926, 1120, 6861, 5081, 2263, 9669, 3266, 3658, - 5489, 8336, 9228, 9050, 2710, 959, 503, 9388, 8894, - 1125, 4132, 9055, 1339, 950, 9377, 3652, 8305, 4569, - 2140, 5050, 2394, 834, 3887, 1577, 9158, 4037, 6498, - 1765, 8357, 5270, 5346, 6960, 8934, 3424, 3057, 5564, - 3928, 6894, 9056, 1140, 7825, 7499, 2144, 6301, 6174, - 3778, 3759, 4101, 4572, 7384, 9536, 7805, 4297, 3370, - 4495, 5486, 6604, 3150, 9581, 5008, 6239, 8027, 2278, - 3096, 5923, 5986, 1731, 1046, 5876, 9172, 7424, 9749, - 6233, 7825, 8115, 8646, 3079, 9386, 9720, 5295, 8921, - 8772, 2793, 8062, 8806, 2168, 2489, 9391, 6601, 2690, - 2317, 9855, 1291, 587, 2921, 8169, 605, 9266, 6087, - 749, 2288, 6877, 2077, 6448, 9217, 9388, 7701, 7469, - 5109, 7589, 7190, 5336, 4592, 7276, 9969, 5869, 2136, - 148, 3039, 2333, 2199, 9759, 4525, 1740, 3847, 675, - 4333, 1792, 1684, 4707, 5642, 4706, 5175, 2176, 2048, - 5814, 1348, 1564, 585, 2533, 6008, 9171, 1069, 3428, - 7194, 297, 7160, 6259, 9416, 862, 4619, 9340, 8651, - 1227, 3697, 3943, 8276, 1295, 8504, 486, 1436, 9051, - 6387, 6702, 2437, 2463, 100, 3108, 4645, 5096, 6950, - 6702, 3423, 8352, 7371, 5571, 5323, 967, 8695, 352, - 8666, 9464, 1756, 1741, 5059, 1790, 1616, 1141, 3853, - 4851, 5461, 7072, 7615, 7386, 5452, 7460, 2144, 1142, - 3116, 9151, 5076, 8651, 8156, 4334, 4476, 9336, 6801, - 2569, 7418, 4473, 7158, 8661, 8781, 5682, 5071, 9322, - 2077, 7344, 2317, 4202, 2939, 2138, 419, 650, 161, - 8447, 2413, 4290, 93, 2006, 5354, 9160, 3419, 8196, - 3617, 5556, 9463, 2666, 9886, 2540, 1632, 8943, 2616, - 4221, 2826, 21, 1205, 2757, 5055, 4807, 2314, 9875, - 9065, 1604, 8148, 1235, 6956, 7492, 9194, 6365, 2575, - 2777, 7237, 6421, 9371, 6994, 4528, 5375, 7498, 8452, - 1818, 2982, 2882, 6751, 9553, 1727, 3250, 8348, 5205, - 7115, 7957, 4302, 989, 3969, 5855, 7465, 896, 6406, - 2829, 
5406, 5406, 6124, 6756, 5512, 6495, 5246, 5801, - 5099, 6364, 6928, 2287, 7768, 8732, 5334, 5339, 3851, - 6480, 8762, 9316, 4802, 5394, 6864, 7568, 1041, 309, - 8689, 426, 1949, 4745, 6318, 5066, 8944, 2063, 1253, - 828, 2789, 79, 3368, 7584, 3122, 6099, 6323, 591, - 169, 9621, 6552, 871, 6792, 5970, 6249, 5495, 7951, - 478, 4865, 4997, 1273, 792, 2631, 4784, 7168, 6809, - 7440, 8631, 7587, 6166, 3994, 202, 6371, 7344, 6284, - 4930, 6855, 8043, 138, 7141, 2833, 2191, 9158, 9844, - 9803, 9641, 7304, 2477, 4026, 9034, 2384, 8496, 552, - 1793, 6758, 2071, 3406, 3107, 4389, 865, 1916, 9102, - 4725, 1504, 2866, 6811, 4775, 2462, 3523, 3052, 6620, - 2103, 7293, 7195, 5534, 6484, 2872, 261, 8485, 3507, - 4618, 2808, 6740, 3594, 4564, 2194, 2942, 5063, 7157, - 954, 204, 1145, 9550, 1345, 2385, 6397, 2686, 4473, - 4007, 3582, 4340, 2257, 6486, 3542, 7569, 7058, 867, - 168, 223, 3460, 7801, 3144, 8830, 4455, 6189, 7782, - 471, 6357, 1740, 6475, 8783, 5384, 5830, 3948, 197, - 5530, 5709, 5846, 9859, 7409, 87, 4631, 8807, 4308, - 4563, 8913, 1687, 4989, 1745, 4150, 8104, 2367, 482, - 9454, 6535, 7451, 8092, 2999, 3117, 461, 5329, 3407, - 7945, 285, 9466, 9314, 2106, 5051, 5274, 2181, 2416, - 6960, 7522, 8968, 9252, 9692, 1165, 7928, 5957, 5068, - 6812, 6092, 8476, 8463, 8374, 577, 275, 1342, 7300, - 3551, 3413, 665, 7296, 4890, 4406, 2250, 6573, 1036, - 6003, 9121, 2204, 457, 7832, 98, 4560, 1647, 3986, - 8639, 2318, 855, 7627, 4864, 7612, 5306, 2295, 1857, - 1494, 5441, 1535, 4653, 4575, 7763, 3809, 1255, 9645, - 1167, 3100, 6787, 1384, 1588, 8094, 6656, 9217, 1125, - 6546, 8429, 2950, 1151, 9959, 1125, 2866, 2541, 1010, - 7891, 8736, 4044, 5457, 4221, 9212, 3478, 8216, 5873, - 8544, 7550, 7946, 1192, 6703, 4668, 1139, 3686, 5232, - 8044, 583, 4894, 9570, 801, 9433, 8277, 7072, 436, - 2572, 4544, 4540, 5185, 4622, 6002, 7736, 8424, 162, - 2177, 2550, 6026, 1482, 1361, 6549, 5794, 6342, 7486, - 1696, 3140, 9177, 1767, 1150, 6108, 7892, 929, 1582, - 7309, 8021, 8137, 2229, 5718, 9411, 9280, 7261, 4021, - 574, 1837, 7544, 9845, 5132, 3736, 8269, 5172, 6896, - 4149, 5336, 1680, 8277, 6818, 3207, 4899, 6770, 8115, - 6285, 9242, 8070, 6964, 3620, 3694, 1188, 2057, 65, - 1145, 7404, 7344, 1990, 7473, 7397, 3017, 567, 7376, - 1464, 630, 9675, 9489, 7031, 7355, 8826, 9458, 6561, - 2107, 9717, 1505, 8747, 6840, 411, 3558, 4273, 4460, - 8213, 2493, 3948, 9037, 5533, 1891, 3404, 8392, 167, - 8493, 5443, 738, 9593, 5895, 6736, 6822, 8228, 9734, - 34, 4692, 9165, 9045, 9537, 5517, 9889, 7476, 1421, - 9810, 5515, 2913, 5417, 9051, 3229, 1975, 4287, 4827, - 704, 6374, 1442, 5230, 3370, 3925, 3308, 8948, 8990, - 6875, 6883, 4157, 792, 1095, 9265, 7439, 3319, 9560, - 7283, 1384, 7524, 5267, 1326, 4886, 3290, 9018, 5873, - 3728, 7592, 6537, 1150, 9990, 2026, 9913, 1784, 6323, - 9438, 8079, 1078, 1702, 2394, 7624, 7147, 2744, 764, - 9125, 2259, 1122, 2224, 3030, 3654, 1390, 2052, 5324, - 8946]), - values=tensor([ 4.6510e-01, -1.1851e+00, -7.8766e-01, 2.5625e+00, - -1.3056e+00, 1.3710e-01, -8.6824e-01, -2.1557e-01, - -1.6566e+00, 1.3935e+00, -2.4106e-01, 5.1370e-01, - 2.0386e+00, -9.6592e-01, -1.0779e+00, -3.0469e-01, - -4.8065e-01, 1.7751e+00, -1.3349e+00, -1.8457e+00, - 1.6793e+00, -7.9022e-01, 4.6276e-01, -4.2176e-01, - 2.1416e-01, -1.2179e+00, -1.7962e+00, -3.0778e-01, - 1.6842e-01, 7.3482e-02, -9.6866e-01, 1.5084e+00, - 1.1559e-01, -7.6094e-01, 1.1392e-01, 2.3571e+00, - -2.0428e+00, 1.7643e-01, 3.6252e-01, -5.2374e-01, - 1.8242e-02, -2.5628e-04, -2.4863e-01, -1.9469e+00, - -1.3525e-01, 7.6616e-01, -6.1827e-01, 
1.4623e+00, - 4.0234e-01, -9.3102e-01, 5.5970e-01, 9.9390e-01, - -1.0789e+00, -9.2461e-01, 9.9863e-01, -1.9075e+00, - 4.2533e-01, -2.5632e-01, -5.1717e-01, -2.3034e+00, - 1.5035e+00, -3.4099e-01, 1.6393e+00, -1.4271e+00, - -1.3065e+00, 5.6064e-01, -6.6536e-01, 1.2163e+00, - -8.3082e-01, 7.3182e-01, -1.8915e+00, -1.4913e-01, - -1.5364e+00, -4.4707e-01, -7.1439e-01, 1.2311e-01, - -8.8605e-01, 6.8646e-01, -7.4860e-01, -1.6668e-01, - -4.1733e-01, 2.9552e-01, 6.2446e-02, -5.5017e-01, - 2.1276e-01, 1.3525e+00, 1.1381e+00, 2.5799e+00, - 8.4653e-01, -1.5049e+00, -1.2432e-01, -1.2748e+00, - 9.9541e-01, -1.7299e+00, 7.5813e-01, -3.3953e-01, - 5.7321e-01, 1.1939e+00, 1.3373e+00, -3.3937e-01, - -1.8734e-01, -5.5667e-01, 2.7102e-01, -2.8104e-01, - -9.4119e-01, 1.3152e+00, -6.0468e-01, 6.5856e-01, - -1.8993e-01, 1.2984e+00, 6.9415e-01, -1.6218e+00, - -1.5279e+00, -3.6317e-01, -3.5990e-02, -9.0025e-02, - -3.7306e-01, 1.3424e+00, -5.4473e-01, -1.7208e-01, - 3.9200e-01, 3.4948e-01, 1.4900e+00, -1.0014e+00, - 5.8705e-01, -1.3112e+00, 6.4468e-02, 8.9720e-01, - 3.4237e-01, 6.4027e-01, -3.0478e-01, 7.7653e-02, - -1.5911e+00, -9.3125e-01, -5.5052e-01, 1.6122e+00, - -1.0818e+00, -1.4921e+00, 1.6598e+00, 6.7445e-01, - 1.2376e+00, 4.9106e-01, 9.9617e-02, -6.4962e-01, - -1.5110e+00, -6.9969e-01, -1.6815e+00, -2.0031e+00, - 1.9607e+00, 1.2342e+00, 2.0612e-01, 2.3349e+00, - 2.8007e-01, 9.3527e-01, -5.8251e-02, 1.0193e-01, - -7.4400e-01, -1.5165e+00, 4.2463e-01, -2.6424e+00, - -9.6145e-01, 6.0624e-01, 3.0544e-01, -1.3511e+00, - 3.8611e-01, -1.9443e-01, 9.1952e-01, 4.6169e-01, - -4.9093e-01, -1.4309e-01, 1.7911e+00, 1.5542e+00, - 1.5857e+00, 1.1222e+00, -9.9359e-01, -8.3758e-01, - -2.3053e-01, -3.2939e-01, -2.6618e-01, -9.9266e-02, - -8.4101e-01, -3.5059e-01, -1.5422e-01, 9.7134e-01, - 6.6765e-02, 4.6862e-01, 1.3485e-01, 1.4273e+00, - -9.6728e-02, -3.3021e-01, 4.2498e-01, 9.1982e-02, - -1.4613e+00, -1.0817e+00, -2.1758e-01, -7.4627e-01, - -8.4526e-01, 4.6238e-01, 2.2618e+00, -2.0822e+00, - -4.2901e-01, 1.2017e+00, 1.3996e-02, 2.1383e+00, - -6.7047e-01, 6.3203e-02, -6.6501e-01, 1.0461e+00, - 3.4558e-01, -3.3683e-01, -3.2935e-01, 2.8500e-01, - -2.3508e+00, 2.1721e+00, -1.3352e+00, 5.5005e-01, - -2.1018e+00, 4.1358e-01, 1.9004e+00, 4.0393e-01, - 2.7356e-01, 3.2737e-01, 4.9669e-01, 2.0839e-01, - -3.1224e+00, 8.2476e-01, -2.9251e-01, -1.2889e-02, - 8.8645e-01, -3.4657e-02, 1.2931e+00, -3.8898e-01, - 1.3940e+00, -3.1794e-01, 9.7072e-01, -1.6855e+00, - 1.7872e-01, -8.6922e-01, 8.7606e-01, 2.0865e-01, - 4.8069e-01, 4.1133e-01, -1.0355e+00, 3.3931e-01, - -8.1359e-01, -5.4044e-01, 6.7072e-02, 2.5251e-01, - -2.9709e-02, -6.6077e-01, -4.6594e-03, -5.5289e-01, - 2.2312e-01, -3.4924e-01, 8.8982e-01, -9.8482e-02, - -3.1412e-01, -8.1787e-02, -1.2792e+00, 1.9663e+00, - 2.4078e+00, -5.4678e-01, -1.1004e+00, -2.3360e-01, - -1.6026e+00, -7.2600e-02, -3.1949e-01, -5.5524e-01, - 1.2097e+00, 1.0391e+00, 1.6608e-01, 8.7306e-01, - -2.9864e-01, -6.4541e-01, -1.2980e+00, 3.2152e-01, - -1.2146e+00, 1.9389e+00, 1.7949e+00, -9.5070e-01, - -5.2639e-02, 2.0406e+00, 5.9254e-01, 1.8495e+00, - -7.1500e-02, -1.3881e+00, 1.6326e+00, 6.1461e-01, - -2.8580e+00, -1.0870e+00, 2.3112e-01, -2.7500e+00, - 6.1071e-01, 3.1063e-01, -3.6584e-01, -1.4505e+00, - -2.1226e-01, -2.9592e-01, -1.0961e+00, -2.6386e-01, - -1.5796e+00, -1.8495e-01, 5.7606e-01, -1.0098e+00, - 1.7051e+00, -5.8284e-01, -2.4981e+00, -1.0028e+00, - 5.2956e-01, 8.4592e-01, 1.5253e-01, -2.6530e-01, - -4.2062e-02, 2.0331e+00, -3.5523e-01, -1.0064e+00, - -1.2726e+00, 3.1157e-01, 1.5859e+00, 
5.9777e-01, - 1.3458e+00, 6.7957e-01, 1.6234e+00, 4.2879e-01, - -2.4038e-02, -2.0399e+00, 3.5459e-01, 1.0082e+00, - -9.0602e-02, -5.6061e-01, 8.4482e-01, 1.4504e+00, - 2.8413e-01, -9.9820e-01, 6.3463e-02, -4.2296e-01, - 1.3435e+00, 7.5189e-01, -4.2847e-01, 1.3946e-02, - -2.0318e+00, 1.3569e+00, 6.7385e-01, -5.8254e-01, - -6.4101e-01, 6.5693e-01, -1.3562e+00, -7.5293e-01, - 1.7744e+00, 2.7262e+00, 1.0672e+00, 1.4232e+00, - 1.2699e+00, 1.2202e+00, 1.7319e+00, 1.9947e+00, - -3.7200e-01, -1.5130e+00, -2.7145e-02, -4.1812e-02, - -4.5648e-01, -1.8564e+00, 1.5628e+00, -1.4863e+00, - 1.6788e-01, 2.7034e+00, 1.2454e+00, -4.8560e-01, - 4.7980e-01, -9.0261e-01, 6.3992e-01, 1.6418e+00, - 2.2604e-01, -4.5253e-01, 7.0416e-01, -4.4859e-01, - 1.6996e-01, -1.6427e+00, -5.4902e-01, -3.5843e-01, - -3.3485e-01, -1.5966e-01, 5.6852e-01, 1.0143e+00, - -4.0687e-01, -1.2097e-01, 9.1750e-01, 1.1129e+00, - 1.9108e+00, -7.7336e-01, 8.5297e-01, -1.0537e+00, - 2.1927e+00, 2.8277e+00, -2.8654e-01, -2.3710e-01, - 6.4486e-01, 2.9973e-01, 7.7804e-01, -4.8105e-01, - -7.1006e-01, 7.2931e-01, 1.0084e+00, -5.4372e-01, - 1.4271e-02, 1.5389e+00, -1.0072e-01, -1.1397e+00, - 3.5270e-01, 5.6743e-01, -3.8694e-03, -1.4513e+00, - -1.8761e+00, -6.2158e-01, -9.7086e-01, -1.4257e+00, - 1.2812e+00, -1.2515e+00, 1.2771e+00, 1.5960e+00, - -5.4477e-01, -6.7320e-01, 8.2737e-01, -6.9054e-04, - 5.2598e-01, 3.4109e-01, -4.3173e-02, 2.6663e-01, - -6.0262e-02, 5.7675e-01, -2.0535e+00, 1.0110e+00, - -6.4484e-01, 2.9912e-01, 3.0629e-01, -8.8359e-01, - 1.3729e+00, -1.4320e-02, 2.5526e-01, 5.3380e-01, - -2.3759e+00, -3.6657e-01, 1.3237e+00, -8.8799e-01, - -4.1736e-01, 5.5523e-01, -7.2616e-01, 5.3034e-02, - -1.2744e+00, -1.4502e+00, -1.2044e+00, 1.9259e+00, - 4.0487e-02, -1.6896e+00, 3.2004e-01, -1.8499e+00, - 1.2196e-01, 3.6399e-01, -1.4809e-01, -1.9344e-01, - 4.1822e-01, 2.5301e+00, -4.1782e-01, 5.4533e-01, - -2.0027e+00, -1.2544e+00, 5.6186e-01, -5.1698e-01, - -2.0740e+00, 2.8366e-01, 4.7901e-01, -3.0873e-01, - -1.2224e+00, 5.0038e-01, -2.4972e-01, 8.8917e-01, - -1.3118e-01, 4.9827e-01, 1.3804e-01, 2.3682e+00, - -5.8729e-01, 1.6821e+00, 1.7120e+00, -8.2441e-01, - -6.5006e-02, -5.0628e-01, -8.9307e-02, -1.1689e+00, - 1.3133e+00, 7.6207e-01, 4.4029e-01, -2.4626e-02, - 1.2594e+00, 5.5366e-01, -4.4957e-01, -8.8519e-01, - 5.3272e-01, -5.6950e-01, -1.1879e+00, -4.0753e-01, - -1.3605e+00, 1.4351e+00, 1.2678e+00, -3.7471e-01, - 1.2134e+00, 1.0563e+00, 2.7591e+00, 2.1069e-01, - -5.2524e-01, 1.0408e+00, 2.0875e+00, -8.5105e-01, - 7.1802e-01, -5.2618e-01, -2.8479e-01, 9.1976e-01, - 1.5595e+00, 8.5354e-01, 1.4565e+00, -7.8867e-02, - -1.1734e+00, 7.8866e-01, -1.8300e-01, 6.9874e-01, - 3.8721e-01, 3.9251e-01, 2.1914e-01, 1.2157e+00, - -8.4949e-01, -2.8779e+00, 2.7838e-01, 3.8143e-01, - -1.2546e+00, 2.3883e-01, 3.0101e-01, 4.1151e-01, - -8.3385e-01, 1.8968e-01, 5.5420e-01, -1.4531e+00, - 8.2937e-01, -5.9848e-01, -8.4489e-01, 3.9736e-01, - -6.2621e-01, 5.4022e-01, 1.1975e+00, -7.7338e-01, - 1.4631e-01, 2.6802e-01, -8.0721e-01, -1.0243e+00, - 1.6447e+00, -8.2279e-01, -9.2380e-01, -1.2093e+00, - -1.7817e+00, 1.8235e+00, 1.1365e+00, -7.3141e-01, - -2.0308e-01, -2.3958e-01, -4.2187e-01, 9.0008e-01, - -4.4552e-01, -2.6791e+00, -4.7896e-01, 5.7690e-01, - -3.1421e-01, 1.7375e+00, -3.3393e-01, 8.6627e-01, - 7.0231e-01, 9.7815e-01, 1.4766e+00, -1.3486e+00, - -1.0032e+00, -3.8465e-01, 2.3794e-02, -1.6844e-04, - 1.4177e+00, 1.7396e+00, 3.2822e-01, -1.4240e+00, - -1.3949e+00, 1.4693e+00, -4.1925e-01, -3.1590e-01, - 3.2256e-01, -1.0909e+00, -9.5775e-01, 8.1493e-01, - 
3.7626e-01, 2.0354e+00, 2.4574e-01, -4.8121e-01, - -1.7903e+00, -1.4595e-01, -5.0273e-01, -8.1455e-01, - 1.8040e-01, -2.9982e-01, -3.9859e-01, -5.9056e-01, - -1.1520e+00, -2.3978e-01, -6.6977e-01, 8.9959e-01, - 4.6632e-02, 1.0803e+00, 1.4454e+00, 1.7208e+00, - 6.6509e-01, -1.7004e+00, 4.1450e-01, -2.1059e-01, - 4.2464e-01, 5.5136e-01, 2.0131e-01, -7.1460e-01, - -8.3729e-01, -1.0243e-02, 1.3632e+00, -1.7551e+00, - -1.3588e+00, 3.9636e-01, -1.2554e+00, 1.9948e+00, - -2.7400e-01, -3.0458e-01, 1.1301e+00, 5.3418e-01, - 2.6124e-01, -4.7098e-01, 4.5674e-01, -3.2176e-01, - -1.6362e+00, 6.3503e-01, 8.1287e-01, -3.6234e-02, - -4.6137e-01, 4.6736e-01, 5.7798e-01, -9.6505e-01, - 3.9737e-01, -1.5885e-01, 1.0166e+00, 5.5412e-01, - 1.5664e+00, 1.3366e+00, 6.6714e-01, -7.2879e-01, - -6.9770e-01, 1.0456e+00, -1.2405e+00, -1.3069e+00, - 1.5084e+00, 1.6906e+00, -1.7410e+00, 5.7193e-01, - 3.6425e-01, -1.1068e+00, -1.6687e+00, 9.7081e-01, - -1.2083e+00, -6.8171e-01, -1.3103e+00, -1.7611e+00, - -3.2198e-01, 3.3919e-01, 4.6307e-01, -2.3675e-01, - 1.8750e-03, -1.3305e+00, -9.7221e-01, 1.5825e+00, - -1.9664e-02, -1.1319e+00, -8.0552e-01, 2.2075e-03, - -1.2696e+00, 5.5452e-01, 5.5817e-01, 8.5820e-01, - 1.1077e+00, 3.8363e-01, 1.1283e+00, 3.0133e-01, - 9.5559e-01, 1.2601e+00, 1.0213e+00, -7.5643e-01, - 3.6932e-01, -1.0699e+00, 1.1802e+00, -9.9758e-01, - 1.6096e-01, -4.0341e-01, 2.4509e-01, -1.1656e+00, - -3.2101e-01, 4.9809e-01, 1.6830e+00, 3.2177e+00, - -5.5622e-01, -8.0158e-01, -1.7405e-01, -9.7176e-01, - 1.3623e+00, 1.4433e+00, 3.1411e-01, -3.6951e-01, - -5.7557e-01, -7.3684e-01, 1.7358e+00, 7.0020e-01, - 9.8040e-01, -4.2710e-01, -2.4180e+00, -1.0714e-01, - -2.2693e-01, -2.2323e+00, 1.2529e-01, -3.8214e-01, - -1.1311e+00, -2.4574e-01, 1.6119e-01, 2.0683e+00, - -2.0826e-02, 1.6209e+00, -7.0642e-01, -8.6061e-01, - -7.4345e-01, 2.4163e-01, -1.4367e+00, -1.8891e-01, - -5.7743e-01, 9.0990e-01, 4.5630e-01, -9.5048e-01, - 2.9866e-01, 8.5177e-01, -1.6545e-01, -4.3229e-01, - 1.1022e+00, 2.4279e+00, -3.3327e-01, 1.2134e+00, - 8.3405e-02, 2.8986e-01, 2.9581e-01, 3.6428e-01, - -9.1768e-02, -1.4431e-01, -2.4550e-01, 8.0447e-01, - -3.5499e-01, -1.6950e-01, 8.5095e-01, 3.2193e-01, - -2.1767e+00, -6.5858e-02, -1.9022e-01, 7.4427e-01, - 2.8915e-01, -1.2071e+00, -5.5746e-01, 6.0034e-01, - -4.8426e-01, -4.6946e-01, 2.3717e+00, 7.2015e-01, - -6.7589e-01, 1.1921e+00, 9.1697e-02, 1.8339e+00, - 5.9323e-01, 1.0148e+00, 5.4519e-01, -3.2006e-01, - 2.0007e+00, -3.9809e-01, -1.6017e-01, -8.8781e-02, - -8.8215e-01, 3.3085e-01, -1.0708e+00, -8.5103e-01, - -3.1377e-01, -2.8602e+00, 3.9455e-01, -4.4294e-02, - 1.3608e+00, 6.2185e-01, 2.1400e-01, 1.2079e-01, - -7.5215e-01, -5.3939e-01, 7.9616e-01, 2.0160e+00, - -8.0832e-01, 9.5000e-01, -3.2049e-01, -1.8067e+00, - -6.2976e-01, -4.6083e-01, -3.6318e-01, -4.0357e-01, - 3.4849e-01, -1.8474e-01, -1.0399e-01, -5.2158e-01, - -1.2544e+00, 1.0219e+00, 5.9551e-01, 1.4205e+00, - 4.8084e-01, -1.3859e+00, -6.3750e-01, -1.4468e+00, - 6.6115e-01, 1.6369e+00, 1.1775e+00, -1.7773e-01, - 4.1258e-01, 3.6533e-02, -2.0452e-01, -8.9974e-01, - 3.8512e-01, 1.4013e+00, 5.9964e-01, 1.6013e+00, - -6.6337e-01, -6.6180e-01, 4.6830e-01, -6.1817e-01, - 1.2435e-01, -3.5529e-01, -1.4814e+00, 6.0047e-01, - 1.1348e+00, 8.4244e-01, -7.4512e-01, -5.0537e-01, - 7.2761e-01, 1.8805e-01, -5.8428e-01, 2.2769e+00, - 2.2407e-01, 1.0591e+00, -1.0339e+00, -5.2887e-01, - 7.7017e-01, 1.9396e-01, -7.6432e-01, 1.2796e-01, - 1.1807e+00, -1.2203e+00, -1.8810e+00, -1.5632e-01, - -5.4480e-01, -4.0401e-01, 1.5412e+00, 1.5190e+00, - -3.2628e+00, 
4.2086e-01, 6.4145e-02, 1.5052e+00, - 6.5965e-01, -1.2032e+00, -2.3329e+00, -8.5109e-01, - 7.7122e-01, -2.2139e-01, 1.8133e+00, 1.6688e+00, - -5.1603e-01, 1.4417e+00, 2.3846e-01, -1.6655e+00, - -1.4235e+00, -8.7032e-01, 1.9825e+00, 8.9877e-01, - -1.1745e+00, -3.2422e-01, -1.7078e+00, 1.4855e+00, - -7.4355e-01, -1.3620e-01, 1.4825e-01, -1.2406e-01, - 1.2080e+00, -9.2723e-01, -1.4178e+00, -3.3201e-01, - 1.9063e+00, 1.5561e+00, 9.0969e-01, 5.0557e-01, - 3.2097e-01, 8.7150e-02, -1.0035e+00, -3.4907e-01, - 4.9610e-01, -9.4817e-01, 1.0936e-01, 4.5905e-01, - 2.7621e-01, -6.4801e-01, 7.6763e-02, -8.4970e-01, - 2.8799e+00, 1.2696e+00, 7.3653e-01, 8.5562e-01, - 3.5026e-01, 5.6897e-01, -1.0671e+00, 4.2879e-01, - 1.1348e+00, -4.7680e-01, -5.6290e-01, 3.0131e-01, - -1.0602e+00, 2.1315e+00, -6.9024e-01, 4.2030e-01, - -7.7252e-01, 4.1459e-01, -2.0817e-01, -2.0772e+00, - -1.1131e+00, -3.4672e-02, 1.8619e-01, 4.1222e-01, - -3.1646e-01, -1.7544e-02, 1.9845e+00, 7.6486e-01, - 1.7350e+00, -5.2143e-01, -2.6930e-01, -1.7636e+00, - -1.1702e+00, -6.5986e-01, 1.7690e-01, 1.9004e-01, - -1.6739e-01, 2.0325e+00, -1.3582e+00, 3.6466e-01, - -1.1404e-01, 1.3273e+00, 8.9265e-01, 2.3433e-01, - -8.3326e-01, -8.0115e-01, -3.0232e-01, -1.4472e+00, - -9.5061e-01, 1.4773e+00, 6.1270e-01, -3.1029e-01, - 1.1950e-01, -8.7306e-01, 3.0622e-02, 1.1238e+00, - 4.1020e-01, -2.4690e-01, -2.2241e-01, -3.7523e-01, - -4.4591e-01, -2.1795e+00, -2.2944e-01, 1.1993e+00, - -6.4699e-01, -4.0099e-01, 7.1939e-01, 1.8280e+00, - -4.7294e-02, -7.0904e-01, 1.5356e-01, 4.5862e-01, - 9.3818e-01, 7.3160e-01, -2.3434e-02, 1.6996e-01, - 1.8246e+00, 1.3146e+00, 1.0100e+00, 1.8521e-01, - -1.3975e+00, 1.7305e+00, 3.3503e-01, -5.7823e-01, - 7.2256e-01, -5.1645e-01, -7.0953e-01, -8.8243e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.1758, 0.8311, 0.4311, ..., 0.7769, 0.5609, 0.5839]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 6.941063165664673 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '351951', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.568303346633911} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([7502, 9380, 4152, 9206, 7602, 8392, 6141, 7366, 4275, - 9634, 1887, 4677, 6803, 5316, 6347, 4951, 8273, 7658, - 729, 1836, 5732, 5438, 1862, 1714, 3878, 4804, 5327, - 1978, 6926, 2420, 8871, 4501, 881, 3976, 1455, 7862, - 5912, 9592, 1859, 4138, 3057, 9408, 5497, 9859, 2947, - 9714, 4498, 2523, 8297, 7540, 8435, 1356, 3458, 1989, - 8226, 6550, 610, 2056, 3239, 4293, 5461, 1918, 8747, - 5638, 6141, 7643, 6245, 2220, 4169, 358, 8501, 7977, - 5813, 119, 5933, 2761, 4193, 3673, 6607, 1351, 1266, - 39, 4127, 7683, 2542, 4186, 2086, 7641, 8100, 1030, - 2514, 6154, 5036, 1324, 9775, 6708, 4448, 8978, 5813, - 1882, 32, 1175, 4976, 4232, 8227, 9510, 4431, 6868, - 8351, 2776, 8157, 9763, 6702, 2421, 9255, 8081, 7794, - 757, 5181, 6522, 7877, 3272, 8027, 716, 9583, 3007, - 396, 408, 8201, 8375, 4467, 4773, 1821, 5170, 1211, - 8659, 6598, 3524, 6371, 5271, 1148, 6513, 1618, 6827, - 9455, 6818, 3164, 9036, 5577, 6505, 482, 4296, 4836, - 2376, 6108, 4494, 9105, 342, 7434, 3159, 4215, 4531, - 7014, 3683, 6074, 8015, 401, 9292, 3222, 9512, 4023, - 4725, 7891, 6679, 3292, 2745, 191, 5857, 4952, 1186, - 3966, 6732, 724, 618, 2792, 6515, 5491, 9163, 300, - 6250, 7737, 7499, 5334, 3314, 1388, 3396, 7337, 6, - 6429, 511, 9313, 1183, 2175, 3071, 9064, 806, 9562, - 337, 7757, 7024, 4213, 1512, 7950, 8780, 6140, 6901, - 7345, 8665, 819, 2713, 7555, 5878, 9818, 869, 994, - 244, 5929, 8963, 3453, 9248, 3572, 3695, 4889, 8448, - 4286, 7616, 9348, 2879, 9556, 4603, 2799, 2908, 7987, - 3156, 7576, 2871, 5080, 3448, 1995, 400, 1901, 8565, - 4026, 8400, 3172, 8763, 9390, 6814, 7013, 1960, 6278, - 3776, 3943, 1931, 2787, 8598, 9550, 3895, 9104, 6316, - 2726, 4221, 2283, 8559, 8538, 9013, 2861, 5738, 2203, - 896, 1531, 3787, 9695, 2879, 8200, 5209, 5225, 5695, - 3385, 9726, 438, 4482, 3973, 9512, 7524, 8274, 3149, - 8489, 3692, 3814, 1742, 3084, 1790, 805, 1418, 7591, - 7190, 9378, 4016, 7025, 327, 9599, 2103, 5693, 7503, - 765, 4010, 1883, 2421, 4359, 6473, 5417, 2157, 7803, - 3061, 6727, 2286, 4192, 9680, 61, 7700, 6351, 8756, - 6323, 1417, 103, 1941, 9405, 597, 5491, 9950, 8988, - 485, 9729, 785, 1806, 1010, 282, 5724, 3766, 6943, - 2865, 3977, 7429, 874, 2610, 4153, 2383, 7161, 1349, - 5024, 7824, 5211, 7907, 6453, 7807, 4335, 6, 1406, - 7435, 8004, 9289, 2452, 4179, 6604, 4042, 3462, 2305, - 9996, 160, 9895, 825, 8746, 879, 960, 8851, 3063, - 976, 2680, 7689, 3451, 4283, 6910, 5841, 8979, 4271, - 1036, 1144, 4111, 3728, 401, 7580, 3393, 1746, 6515, - 5427, 9913, 4210, 3080, 8036, 7480, 5568, 6528, 3282, - 650, 2634, 8480, 8700, 2040, 5189, 4471, 7607, 6382, - 5648, 7190, 1869, 7393, 6867, 9876, 1484, 6916, 147, - 2460, 6284, 2857, 6702, 590, 2456, 2828, 9712, 7319, - 9238, 1096, 7348, 4837, 9296, 7151, 5966, 5309, 9201, - 4771, 5047, 7850, 8195, 3966, 5970, 8449, 8234, 7147, - 4822, 4998, 7547, 3031, 9062, 7800, 7624, 8687, 9638, - 2149, 1028, 5882, 9814, 2252, 203, 7017, 8174, 3149, - 9344, 1731, 6434, 2761, 7146, 3305, 6398, 3428, 2709, - 6951, 4164, 5126, 1034, 3656, 776, 2739, 3417, 2271, - 8060, 65, 4689, 9818, 9603, 5814, 8225, 398, 9112, - 1964, 5063, 5747, 2, 6410, 804, 7356, 2045, 6804, - 9568, 2776, 189, 1437, 4238, 4032, 1410, 9877, 4304, - 3500, 2334, 5203, 5354, 4476, 1446, 3091, 672, 9063, - 5217, 8607, 283, 2878, 2562, 1147, 1027, 9549, 6237, - 5344, 3546, 9129, 97, 1582, 6240, 7948, 4781, 8763, - 9968, 6022, 1993, 4851, 339, 4432, 5144, 6521, 2496, - 4819, 8663, 
7720, 8852, 844, 5175, 4585, 971, 2320, - 2981, 1269, 2795, 7050, 3810, 2303, 7950, 3298, 3319, - 4699, 9817, 8657, 7456, 65, 920, 9552, 2273, 5339, - 6137, 115, 8299, 8808, 5493, 9722, 6248, 3538, 6412, - 1389, 6, 8260, 1885, 842, 5573, 2698, 3090, 2800, - 2921, 3739, 2293, 2710, 1611, 3158, 2070, 8406, 3015, - 6726, 7435, 4345, 9136, 5984, 1060, 9009, 9251, 72, - 5505, 7237, 8787, 975, 9505, 908, 822, 4448, 7693, - 6356, 2653, 5621, 8148, 9198, 7465, 6973, 3587, 2130, - 897, 1607, 7456, 2192, 7464, 6519, 4634, 5937, 6298, - 1791, 8423, 3982, 4246, 9101, 2582, 2031, 2228, 6965, - 7107, 2366, 8535, 2639, 8466, 4098, 459, 150, 5856, - 1912, 964, 8433, 1624, 8808, 7023, 7864, 7521, 815, - 6922, 1220, 3036, 1328, 3530, 4761, 8404, 256, 9072, - 1115, 3943, 7302, 3124, 1654, 8578, 3009, 6915, 9182, - 4380, 9821, 5307, 2554, 5291, 144, 8487, 8360, 6441, - 9740, 1644, 3437, 6837, 2455, 2078, 1936, 8420, 1991, - 5409, 3771, 4869, 6289, 4915, 6886, 877, 3586, 9557, - 8263, 6130, 9991, 5328, 4036, 5503, 4642, 9267, 8162, - 7742, 7316, 8655, 6294, 8521, 7154, 7219, 563, 5545, - 5616, 9021, 3586, 5561, 3290, 1598, 2860, 2783, 3983, - 6641, 4924, 8108, 2753, 1497, 7966, 6426, 9602, 8617, - 1605, 3867, 2988, 4313, 173, 7776, 7611, 4603, 7159, - 9296, 9220, 1927, 6714, 3154, 7543, 6055, 7089, 9494, - 5918, 4799, 903, 494, 2395, 3449, 1711, 6546, 4011, - 2947, 1612, 3708, 358, 6190, 3954, 3577, 8911, 9868, - 6251, 8055, 6089, 7963, 8840, 4185, 6755, 355, 6352, - 5311, 1059, 610, 7958, 4850, 2499, 9711, 3806, 6250, - 8375, 8505, 8212, 4529, 5903, 779, 6298, 843, 2580, - 2440, 8346, 8684, 1928, 735, 9671, 9759, 3161, 3871, - 916, 5753, 9604, 8843, 1124, 4123, 7198, 2671, 3627, - 8088, 1464, 8008, 6214, 5921, 5326, 1986, 5777, 4405, - 2420, 5995, 1834, 7139, 3926, 7935, 8098, 3310, 4314, - 4272, 4364, 8323, 1501, 8907, 3647, 5194, 9153, 5620, - 3776, 3885, 320, 4474, 3998, 9326, 2496, 7575, 3635, - 9821, 6912, 1430, 9274, 8332, 8750, 3666, 4886, 3514, - 5684, 7022, 7225, 4670, 6145, 9400, 6955, 669, 9314, - 5042, 9957, 522, 2088, 3120, 6051, 814, 2768, 7866, - 1760, 8040, 94, 6091, 5096, 7195, 1370, 6937, 3280, - 5126, 9403, 8489, 4972, 7860, 4371, 7683, 9949, 2216, - 7380, 3573, 6442, 6367, 9355, 6244, 3541, 6659, 7982, - 3718, 7635, 5639, 6623, 9943, 1105, 2414, 8374, 645, - 2997, 695, 3363, 2434, 1834, 1918, 2808, 118, 5131, - 2293, 5340, 7580, 6903, 2854, 2831, 1839, 3671, 4266, - 870, 7134, 8524, 2828, 6020, 1177, 3862, 9275, 4728, - 3816, 1273, 8697, 4773, 2723, 595, 490, 5292, 2210, - 6928, 8693, 1355, 5024, 3482, 1518, 5400, 4099, 1127, - 1419, 8205, 5821, 9904, 9882, 273, 1299, 6646, 4904, - 4155, 5241, 7405, 1312, 4296, 9010, 6357, 6602, 6138, - 9034]), - values=tensor([ 1.3297e-01, 9.0849e-01, 1.8092e+00, 8.0206e-01, - -4.5551e-01, -7.6866e-01, 6.3766e-01, -3.0822e-01, - 1.0564e-02, 1.4327e+00, 3.2609e-01, 4.7462e-01, - 8.8278e-02, -1.9409e-01, -1.0782e+00, 6.6638e-01, - -1.0486e+00, -1.8225e-01, 6.1771e-01, -5.6818e-01, - -1.2272e+00, -5.7143e-02, 6.8652e-01, -1.1087e+00, - 1.2571e+00, 2.5477e-01, 5.4997e-03, -1.9981e+00, - 5.6593e-01, 4.1133e-01, 2.4267e+00, 4.1591e-01, - -3.7050e-01, 9.3574e-01, 3.3172e-01, 3.2410e-02, - 5.9685e-01, -1.1622e+00, 3.9498e-01, -1.0049e+00, - -9.0504e-01, -2.0467e-01, -8.5424e-01, 8.1236e-01, - 3.1720e-01, 9.8217e-01, 2.8819e-02, 1.6535e+00, - -1.0838e+00, 1.2877e-01, 4.1395e-01, -2.5200e+00, - -5.4347e-01, -2.5896e-01, 2.5096e-01, -5.3203e-01, - 1.9152e+00, -1.5833e-01, 1.4159e+00, -1.1105e+00, - 5.9711e-01, -5.6073e-02, 5.5415e-02, 7.8036e-02, - 6.2419e-01, 
9.5551e-02, 8.7368e-01, 9.3345e-01, - 5.2727e-01, 9.5391e-01, 2.5492e-01, -9.6762e-02, - 1.0802e+00, 1.3122e+00, -1.6240e+00, 2.3697e-01, - -5.4113e-02, 7.6388e-01, -6.4595e-01, -5.2535e-02, - 7.3346e-01, 5.0813e-01, -1.5740e+00, 3.3960e-01, - -2.9510e-01, 1.7841e+00, 4.8756e-01, -4.5259e-01, - -1.8444e+00, -2.1357e-01, 1.1029e-01, -1.7170e-01, - -5.2037e-01, -1.1596e+00, 2.6354e-01, -1.6632e+00, - 4.0846e-01, -1.1808e+00, -1.0450e+00, -1.4962e+00, - -1.6788e+00, -1.1995e-01, 5.6411e-01, 1.1903e+00, - 5.0231e-01, 2.1936e-01, -5.6346e-01, 1.5764e+00, - -1.2601e+00, -9.2481e-01, -4.7580e-01, -8.5261e-01, - -5.7967e-01, 1.7259e+00, -8.9352e-01, 8.9318e-01, - -1.3694e+00, 2.3923e-02, -1.0432e-01, -2.0033e+00, - 9.1463e-01, -9.0454e-01, -3.1114e-01, -7.4375e-01, - -4.3003e-01, 1.5056e+00, 7.8687e-03, -6.1210e-01, - 2.1620e+00, 1.7638e+00, 5.2404e-01, -9.3079e-01, - -1.1023e+00, -8.3680e-01, 6.4900e-02, 1.1635e+00, - -6.0633e-01, -1.5557e-01, 3.6461e-01, 8.2103e-01, - 1.0447e-01, -7.3858e-01, 3.3201e-01, -2.4104e-01, - -6.9673e-01, -2.8446e+00, 6.1967e-01, 1.1656e+00, - -7.1321e-01, -2.0136e+00, -1.0190e+00, -1.0390e+00, - -2.8866e-01, 6.4077e-01, 2.6263e-01, 4.2757e-02, - 1.5566e+00, 1.2000e+00, 3.6485e-01, 1.1036e-01, - 8.1248e-01, -2.1197e-02, -5.7540e-01, -6.0713e-01, - 6.3466e-01, 1.0460e+00, -1.1099e+00, -7.2814e-01, - 1.1725e+00, -4.0501e-01, 7.8776e-01, 9.8517e-01, - -1.4285e+00, -5.8503e-01, -9.1038e-01, -7.9574e-01, - -1.1846e+00, -5.8986e-01, 1.2349e+00, 8.0958e-02, - -3.1895e-01, 1.4251e-02, -5.6799e-01, 6.6427e-02, - 2.2719e-01, -6.0950e-01, -4.1164e-01, -2.4372e+00, - -1.4070e+00, 1.5584e+00, 5.2180e-01, 2.7513e-01, - -2.9970e-01, -1.8697e+00, 1.6909e+00, 7.4602e-01, - -7.0784e-02, 3.7053e-02, -5.2767e-01, -5.9305e-01, - 5.2620e-01, 2.8019e-01, -2.9377e-01, -4.7461e-01, - -1.5665e+00, 1.0476e+00, -1.0441e+00, 7.3423e-01, - 2.1548e-01, 7.3230e-01, 1.1592e+00, -1.8343e+00, - -9.8880e-01, 1.1552e+00, -2.6919e+00, 1.1854e+00, - 4.7003e-01, 4.4456e-01, 7.8471e-01, 1.5144e-01, - -8.5405e-01, -7.0411e-02, 1.3788e+00, 1.1369e+00, - 8.9437e-01, -1.6914e+00, -9.5504e-01, 6.4915e-01, - 3.9823e-01, -1.1050e-01, -5.4955e-01, 8.2386e-01, - -1.2410e+00, -4.0700e-01, -2.7036e-01, -2.9186e-01, - -8.5945e-01, 5.6768e-01, -1.4587e+00, 7.8769e-01, - 3.2238e-01, -1.5819e-01, 3.2982e-01, 1.4037e+00, - 8.8634e-01, -1.0269e+00, 1.0176e+00, -1.3746e+00, - -5.1208e-01, 6.4701e-01, -5.2470e-01, -1.7717e+00, - 1.8699e-01, -5.4338e-01, -4.1919e-01, -1.5675e+00, - -7.1253e-01, -5.7763e-03, -2.7710e-01, -9.4082e-01, - 2.7434e-03, 2.0787e-01, -1.8595e-01, 7.3730e-01, - 5.7293e-01, 1.9967e+00, -1.5934e-01, 4.3268e-01, - -1.4715e+00, 1.0482e-01, 3.4588e-01, 1.6261e-01, - -1.1423e+00, 4.5188e-01, -4.9737e-01, -7.2952e-01, - -1.7259e+00, -1.5784e+00, 1.4417e+00, 1.2186e+00, - 2.5223e-01, 1.7406e+00, 1.4400e-01, 3.7990e-01, - 7.5243e-01, 1.3841e+00, 1.1362e+00, 1.5712e+00, - -8.7206e-01, 1.1318e+00, -1.9833e-01, 5.9034e-01, - -1.7236e-01, -1.5032e+00, -2.8552e-01, -1.1716e-01, - -1.6424e-01, -1.1008e+00, 1.5305e-01, 6.9615e-01, - -5.8633e-01, 1.4421e-01, 1.2450e+00, 4.3681e-01, - 1.5092e-01, 2.6360e-01, -1.0523e+00, -6.2697e-01, - -3.6902e-01, -3.0877e-01, 6.4047e-01, 6.4782e-02, - 1.1856e+00, 4.2417e-01, 1.7686e+00, -4.5680e-01, - 2.0995e-01, -2.1633e+00, 1.1540e+00, -1.7442e+00, - 9.6339e-01, -1.6156e+00, 2.3688e-01, 2.5577e-01, - -8.9686e-01, 3.4455e-01, 8.1875e-01, -1.5136e-01, - 3.9369e-02, 7.0183e-01, 3.3034e-02, 8.2130e-01, - 7.0307e-01, -9.2346e-01, -4.5889e-01, -2.6156e-01, - -4.3084e-01, 8.8781e-01, 
9.9804e-01, 3.3388e-01, - 1.0757e-01, 5.9697e-01, 1.2427e+00, 3.2869e-01, - -1.5097e+00, -1.9083e-01, 2.8416e-01, -5.2638e-01, - 1.4738e+00, -1.5979e-01, 3.4783e-01, 3.6421e-01, - 4.4399e-01, 6.3267e-01, 1.3901e-01, -9.2403e-01, - 1.7771e+00, 6.5199e-02, 1.5269e+00, 1.6496e+00, - 7.2533e-02, -6.8676e-01, 1.3414e+00, 9.6359e-01, - 2.9568e+00, -8.5358e-01, -6.7427e-01, 1.3732e-01, - -1.0654e+00, 2.3110e-01, 7.2931e-01, 1.7194e+00, - -1.8936e+00, -1.5423e+00, -9.9066e-01, -2.6539e-01, - -7.8393e-01, -5.5044e-01, -3.5931e-01, 1.8318e-01, - 8.2474e-02, -1.9846e+00, 6.2423e-01, 7.7254e-01, - -9.8151e-01, 9.3887e-02, -5.3173e-01, -1.5621e+00, - -1.1312e+00, 1.3492e-01, 2.5026e+00, -7.9721e-01, - 3.1173e-03, -8.6647e-01, -7.1077e-01, 1.3134e+00, - -5.3493e-01, -1.1850e+00, -5.1230e-01, 1.9353e+00, - -4.5254e-01, -3.7063e-01, 1.6567e+00, 4.7380e-01, - -2.9907e-01, -2.4430e-01, 3.3897e-01, 1.0867e-01, - 2.3405e-01, 1.8532e+00, -2.5639e-01, 2.0512e+00, - -8.5014e-01, -8.0865e-01, 1.4545e+00, -2.5015e+00, - -9.3566e-01, -1.2448e-01, -2.2573e-01, -1.4660e+00, - 1.1722e+00, -1.1998e+00, 1.6788e+00, -1.0405e+00, - 1.2098e+00, -1.1788e+00, -4.9755e-01, 1.7964e+00, - -3.1077e-01, -1.4468e-01, 3.4519e-01, 1.6835e+00, - 3.4984e-01, -2.1993e-01, 1.3576e+00, -8.1823e-01, - 1.1165e+00, 9.5583e-01, -3.3059e-01, 1.0923e+00, - 8.5440e-01, -9.3519e-01, 4.0902e-01, 1.3268e+00, - -1.2174e+00, 1.4653e+00, 2.5019e-01, 4.5097e-03, - -7.5048e-01, 5.9750e-01, -1.1422e+00, -3.5960e-01, - 1.0927e+00, -3.1503e-01, -2.4308e+00, -2.2413e+00, - -1.4103e+00, -3.0158e-01, 1.6243e+00, 5.1640e-01, - -9.5662e-01, 1.0604e-01, -1.0351e+00, -1.1975e+00, - 6.2117e-01, 1.5310e+00, -4.3492e-01, -9.1871e-01, - 8.9716e-01, -9.1954e-01, -3.8931e-01, -7.7023e-01, - 3.6372e-01, 1.3470e+00, 1.4388e+00, 1.3091e+00, - 1.1106e+00, 1.5007e+00, 1.6660e+00, -4.8697e-01, - -9.4078e-01, 7.6510e-01, -6.8051e-01, -1.6474e+00, - 2.2544e-01, -1.8447e+00, -5.3103e-02, -1.2185e+00, - 6.9555e-01, 3.6630e+00, -6.4696e-01, -7.2782e-01, - -5.0910e-01, 4.9129e-01, 3.3216e-01, 1.0406e+00, - 5.5768e-01, 1.5851e+00, 2.1537e+00, -9.0319e-01, - -1.9001e+00, 6.7718e-01, -8.4279e-02, -1.4888e-01, - -4.7245e-01, -1.9197e+00, 7.3433e-01, -1.6878e+00, - 3.1570e-01, 2.1447e-01, 1.5316e-01, -1.3019e+00, - -6.3478e-01, -4.5551e-01, 8.4241e-01, -3.9209e-01, - 1.5644e+00, 4.9729e-01, 2.0649e-01, -2.5577e-01, - -1.4501e+00, 2.3429e+00, 8.7758e-01, 3.2614e-01, - -1.8522e+00, 1.8349e+00, 9.3899e-01, 1.1318e+00, - 5.3172e-01, 7.2352e-01, 1.4439e+00, 5.7875e-01, - -3.6690e-01, -1.1905e+00, -1.9647e+00, 7.6827e-01, - -2.3615e+00, 1.2002e+00, -3.5475e-01, -1.1161e-01, - -1.0536e+00, -3.8826e-01, -1.0184e-01, -5.5691e-01, - -7.4732e-01, -2.5919e+00, 9.1701e-01, -9.3255e-01, - -2.1214e-02, -3.6747e-01, 1.6556e+00, 2.6905e-01, - 5.1609e-01, 2.3354e+00, 5.9431e-02, -1.6283e+00, - 2.8759e-01, -1.3781e+00, 2.4710e+00, 6.1824e-01, - 2.2045e-01, 6.7652e-01, 1.3821e-02, 1.7212e-01, - 1.4158e+00, -8.1908e-01, 1.4151e+00, 1.4381e+00, - 4.5847e-01, -7.8703e-03, 7.8215e-01, 5.3806e-01, - -2.9901e-01, 3.9370e-01, 1.4362e+00, -2.2438e+00, - 3.0226e-01, -5.8754e-01, 1.5191e+00, -4.8316e-01, - -8.0640e-01, -1.6484e+00, -5.6154e-01, -1.5894e+00, - -1.3882e+00, -2.4549e-03, -1.5414e+00, -1.4992e+00, - -5.4732e-01, -1.0648e+00, 1.8636e-01, -1.1982e+00, - 5.1991e-03, -1.1696e+00, -6.8966e-01, -1.5766e-01, - 8.7436e-01, 3.2097e-01, -9.2122e-01, 1.1185e+00, - -8.4992e-01, -1.3039e+00, 1.9983e-01, -9.6774e-02, - 3.7448e-01, -7.9689e-01, 1.5311e+00, 1.0261e-01, - -1.0185e+00, 2.1525e+00, 7.8248e-01, 
-2.3518e-01, - 2.1233e-01, 1.4207e-02, 1.2566e-01, -1.9384e+00, - -1.2048e+00, 1.5981e+00, 6.9756e-01, -5.6596e-01, - 5.1634e-01, 9.3072e-01, 1.1388e+00, -9.9768e-01, - -9.4170e-01, 7.9742e-01, -2.9637e-01, 9.0352e-02, - 9.4303e-01, -7.1913e-03, 1.9184e-01, 9.1883e-01, - -9.7573e-01, 2.3804e-01, 7.4936e-02, -5.8641e-02, - -9.8420e-01, -5.0700e-02, 1.7817e-01, -1.5865e+00, - -2.4564e+00, -4.7479e-01, 1.2967e+00, 1.4371e-01, - 1.5005e-01, -7.8392e-01, -1.7234e-01, 8.9071e-01, - -2.4219e-01, -4.7119e-01, 1.4971e-01, -7.8751e-01, - 8.0110e-01, 6.2573e-01, -1.1733e+00, 1.6007e+00, - -3.8459e-01, -5.2098e-01, 2.1626e+00, 8.8267e-01, - 1.0954e+00, 5.7656e-01, -2.2267e+00, 1.4519e+00, - -1.5851e+00, -5.9257e-01, -4.5129e-01, 7.0686e-01, - 6.0109e-01, 1.6994e+00, 8.6925e-01, -1.5427e+00, - -7.9651e-01, 1.8621e-01, -3.1212e+00, 1.4901e-02, - 1.6922e-01, 1.2513e+00, -4.2448e-01, 1.0561e+00, - -8.6462e-01, 1.4342e+00, 4.1248e-01, 6.6247e-01, - -3.1983e-01, -1.8106e-02, -9.5803e-01, -1.4268e+00, - -1.8884e-01, 1.3878e-01, 1.6822e+00, -7.8309e-01, - -1.1472e+00, 1.8898e-01, -3.1205e-01, 2.8555e+00, - 2.6930e-03, 7.8696e-02, 1.1022e+00, -9.1744e-01, - -1.7009e+00, -1.5098e+00, 2.2910e-01, -8.8691e-01, - 7.5633e-02, -7.9249e-01, 1.5384e+00, -5.9327e-02, - 6.1279e-01, 4.3493e-01, -9.5282e-01, 6.0943e-01, - 6.5050e-02, -2.4542e-01, 1.2162e+00, -8.0156e-01, - -6.9050e-01, -1.6580e+00, 4.4322e-01, 8.2145e-01, - -1.0268e-01, 5.5264e-01, -1.9286e-01, -5.5476e-01, - -7.8615e-01, -6.5843e-01, 7.1259e-01, 5.6129e-01, - -8.3640e-01, 1.0283e+00, 4.8384e-01, 6.8375e-01, - 5.0181e-01, -3.5927e-01, -1.2558e+00, 5.2600e-02, - -4.8078e-01, -1.0970e+00, -5.5662e-01, -1.5449e-01, - -3.8639e-01, 5.6022e-01, -1.0058e-01, 6.0612e-01, - 2.7150e-01, 1.0561e+00, -2.6207e-01, -1.5548e+00, - 7.8230e-01, 1.1712e+00, -5.6296e-03, -2.0520e-01, - 5.9818e-01, -8.2466e-01, 1.9089e-01, 2.7547e-01, - 1.7971e+00, 1.2536e-01, 5.2932e-01, 4.9101e-01, - 6.0462e-01, -2.4329e-01, 7.5199e-01, -6.8705e-01, - 3.6761e-01, 3.6483e-02, 1.4891e-02, -1.9413e+00, - 2.9578e-01, 9.9359e-01, 1.4413e+00, -2.1064e+00, - 9.3117e-01, -1.0072e-01, 2.6773e+00, 2.2882e-01, - -4.1157e-01, 1.0738e+00, -6.8263e-01, 1.2002e+00, - 3.1006e+00, -8.7404e-01, 9.0200e-01, 2.0398e+00, - -5.7111e-01, 5.4812e-01, -7.6108e-01, -5.2308e-01, - 1.4593e+00, 2.6750e-02, 9.7504e-02, 7.8603e-01, - 3.9986e-01, 7.9704e-01, 3.3566e-01, -2.0831e+00, - -4.2611e-01, -6.2602e-01, -5.6343e-01, 5.3650e-01, - -2.8015e-01, -1.4510e+00, -1.2542e+00, 1.3278e+00, - -1.8985e+00, 1.6613e+00, 5.1065e-01, 1.4230e-01, - 1.6969e-01, -3.3262e-01, 1.2468e-01, -8.9060e-01, - -1.6869e+00, -9.5599e-01, 2.1929e-01, -1.9725e+00, - 7.8296e-01, -2.2028e+00, 4.3836e-01, 1.4662e+00, - 3.9112e-01, -9.9149e-01, -1.9531e-01, -7.1714e-01, - -7.8987e-01, 1.1565e+00, 9.0068e-01, -1.5761e+00, - -1.3562e+00, -1.3920e+00, 1.2390e+00, -1.7055e+00, - 1.4268e-01, 1.7303e+00, 7.5134e-01, -4.8038e-01, - 3.8487e-02, 2.1008e+00, 1.7315e+00, 1.0098e+00, - 1.4817e+00, -2.1972e-01, 9.3268e-01, 4.9045e-01, - -4.0701e-01, 1.1468e+00, 2.0466e+00, -3.1001e-01, - -1.8649e+00, 3.1769e-01, 4.9814e-01, -5.3245e-01, - 1.1688e+00, -1.5140e+00, -1.1835e+00, 4.0259e-01, - -1.1849e+00, -2.0930e-01, -4.2964e-01, -1.7172e-01, - -1.5387e+00, 8.8588e-01, -4.9526e-01, -1.5400e+00, - 6.3536e-01, 2.6739e-01, -1.0050e+00, 1.0043e+00, - -5.8422e-01, -8.4750e-01, -1.0945e+00, 2.8788e-01, - -1.1867e+00, 8.4570e-01, -1.2528e+00, 7.0623e-01, - 2.0538e+00, 3.6141e-01, -6.0493e-01, -9.8517e-01, - -2.3043e+00, 9.0285e-01, -3.4727e-01, 5.0794e-01, - 2.5091e-01, 
-3.6726e-01, 9.3557e-01, -5.2361e-01, - -1.8882e+00, 4.5424e-01, 6.7875e-01, 1.2379e+00, - -1.5805e+00, -7.8262e-01, -1.1040e+00, 1.0546e+00, - 2.7874e-01, -1.5022e-01, 1.9719e-01, -1.7536e+00, - -7.7400e-01, 1.5210e-02, 7.6396e-01, -7.5197e-01, - -3.8214e-01, -1.5777e-01, 1.3004e+00, -4.5149e-01, - -3.1946e-01, 5.1256e-01, -1.2595e+00, 1.2095e+00, - -9.0147e-02, 2.6269e+00, -1.1671e+00, 9.5037e-01, - -1.6088e-01, 1.1225e+00, 7.9420e-01, -8.1975e-01, - 5.6791e-01, -1.2193e+00, 1.8191e+00, -1.3930e+00, - -1.1907e+00, -2.6001e-01, -4.4940e-01, -1.6400e-01, - -4.0858e-01, 1.3220e+00, 1.4310e-01, -5.9821e-01, - -2.0409e+00, 5.4306e-01, 3.2210e-02, 7.7063e-01, - 4.1895e-02, -1.1647e+00, -9.4491e-01, -5.1099e-01, - -2.6572e+00, -2.3200e-01, 7.1144e-01, -8.8903e-01, - -6.9510e-01, 4.7389e-01, -5.7160e-01, 7.8597e-01, - 1.5643e+00, -6.0747e-01, -5.2934e-01, 1.8096e+00, - -3.8622e-01, 6.7489e-01, -1.2660e+00, -8.0505e-01, - 7.8886e-01, 8.5037e-01, 1.7826e+00, -1.7251e+00, - -3.3299e-02, -7.9316e-01, -1.0186e+00, -7.3455e-01, - -1.3291e+00, 2.3412e-02, -1.9621e-01, -1.5976e-01, - -1.0141e+00, 1.2968e+00, 3.2055e-02, -1.2980e+00, - -4.8181e-01, 3.2760e-01, -3.5702e-01, 3.2413e-01, - 5.3971e-01, 7.0470e-02, -2.6942e-01, 1.1506e+00, - 1.3213e+00, 4.8988e-01, 1.5174e+00, 8.0358e-01, - -1.1400e+00, 1.8778e+00, 4.5567e-01, -1.2671e+00, - 1.3682e+00, -1.1556e+00, -2.3813e-01, 5.9625e-01, - -3.9434e-01, 9.9466e-01, -1.1281e+00, -1.1223e+00, - -1.4276e-01, -2.6829e+00, 5.2359e-02, 4.0542e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4959, 0.2009, 0.6905, ..., 0.0310, 0.9833, 0.5457]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.568303346633911 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([7502, 9380, 4152, 9206, 7602, 8392, 6141, 7366, 4275, - 9634, 1887, 4677, 6803, 5316, 6347, 4951, 8273, 7658, - 729, 1836, 5732, 5438, 1862, 1714, 3878, 4804, 5327, - 1978, 6926, 2420, 8871, 4501, 881, 3976, 1455, 7862, - 5912, 9592, 1859, 4138, 3057, 9408, 5497, 9859, 2947, - 9714, 4498, 2523, 8297, 7540, 8435, 1356, 3458, 1989, - 8226, 6550, 610, 2056, 3239, 4293, 5461, 1918, 8747, - 5638, 6141, 7643, 6245, 2220, 4169, 358, 8501, 7977, - 5813, 119, 5933, 2761, 4193, 3673, 6607, 1351, 1266, - 39, 4127, 7683, 2542, 4186, 2086, 7641, 8100, 1030, - 2514, 6154, 5036, 1324, 9775, 6708, 4448, 8978, 5813, - 1882, 32, 1175, 4976, 4232, 8227, 9510, 4431, 6868, - 8351, 2776, 8157, 9763, 6702, 2421, 9255, 8081, 7794, - 757, 5181, 6522, 7877, 3272, 8027, 716, 9583, 3007, - 396, 408, 8201, 8375, 4467, 4773, 1821, 5170, 1211, - 8659, 6598, 3524, 6371, 5271, 1148, 6513, 1618, 6827, - 9455, 6818, 3164, 9036, 5577, 6505, 482, 4296, 4836, - 2376, 6108, 4494, 9105, 342, 7434, 3159, 4215, 4531, - 7014, 3683, 6074, 8015, 401, 9292, 3222, 9512, 4023, - 4725, 7891, 6679, 3292, 2745, 191, 5857, 4952, 1186, - 3966, 6732, 724, 618, 2792, 6515, 5491, 9163, 300, - 6250, 7737, 7499, 5334, 3314, 1388, 3396, 7337, 6, - 6429, 511, 9313, 1183, 2175, 3071, 9064, 806, 9562, - 337, 7757, 7024, 4213, 1512, 7950, 8780, 6140, 6901, - 7345, 8665, 819, 2713, 7555, 5878, 9818, 869, 994, - 244, 5929, 8963, 3453, 9248, 3572, 3695, 4889, 8448, - 4286, 7616, 9348, 2879, 9556, 4603, 2799, 2908, 7987, - 3156, 7576, 2871, 5080, 3448, 1995, 400, 1901, 8565, - 4026, 8400, 3172, 8763, 9390, 6814, 7013, 1960, 6278, - 3776, 3943, 1931, 2787, 8598, 9550, 3895, 9104, 6316, - 2726, 4221, 2283, 8559, 8538, 9013, 2861, 5738, 2203, - 896, 1531, 3787, 9695, 2879, 8200, 5209, 5225, 5695, - 3385, 9726, 438, 4482, 3973, 9512, 7524, 8274, 3149, - 8489, 3692, 3814, 1742, 3084, 1790, 805, 1418, 7591, - 7190, 9378, 4016, 7025, 327, 9599, 2103, 5693, 7503, - 765, 4010, 1883, 2421, 4359, 6473, 5417, 2157, 7803, - 3061, 6727, 2286, 4192, 9680, 61, 7700, 6351, 8756, - 6323, 1417, 103, 1941, 9405, 597, 5491, 9950, 8988, - 485, 9729, 785, 1806, 1010, 282, 5724, 3766, 6943, - 2865, 3977, 7429, 874, 2610, 4153, 2383, 7161, 1349, - 5024, 7824, 5211, 7907, 6453, 7807, 4335, 6, 1406, - 7435, 8004, 9289, 2452, 4179, 6604, 4042, 3462, 2305, - 9996, 160, 9895, 825, 8746, 879, 960, 8851, 3063, - 976, 2680, 7689, 3451, 4283, 6910, 5841, 8979, 4271, - 1036, 1144, 4111, 3728, 401, 7580, 3393, 1746, 6515, - 5427, 9913, 4210, 3080, 8036, 7480, 5568, 6528, 3282, - 650, 2634, 8480, 8700, 2040, 5189, 4471, 7607, 6382, - 5648, 7190, 1869, 7393, 6867, 9876, 1484, 6916, 147, - 2460, 6284, 2857, 6702, 590, 2456, 2828, 9712, 7319, - 9238, 1096, 7348, 4837, 9296, 7151, 5966, 5309, 9201, - 4771, 5047, 7850, 8195, 3966, 5970, 8449, 8234, 7147, - 4822, 4998, 7547, 3031, 9062, 7800, 7624, 8687, 9638, - 2149, 1028, 5882, 9814, 2252, 203, 7017, 8174, 3149, - 9344, 1731, 6434, 2761, 7146, 3305, 6398, 3428, 2709, - 6951, 4164, 5126, 1034, 3656, 776, 2739, 3417, 2271, - 8060, 65, 4689, 9818, 9603, 5814, 8225, 398, 9112, - 1964, 5063, 5747, 2, 6410, 804, 7356, 2045, 6804, - 9568, 2776, 189, 1437, 4238, 4032, 1410, 9877, 4304, - 3500, 2334, 5203, 5354, 4476, 1446, 3091, 672, 9063, - 5217, 8607, 283, 2878, 2562, 1147, 1027, 9549, 6237, - 5344, 3546, 9129, 97, 1582, 6240, 7948, 4781, 8763, - 9968, 6022, 1993, 4851, 339, 4432, 5144, 6521, 2496, - 4819, 8663, 
7720, 8852, 844, 5175, 4585, 971, 2320, - 2981, 1269, 2795, 7050, 3810, 2303, 7950, 3298, 3319, - 4699, 9817, 8657, 7456, 65, 920, 9552, 2273, 5339, - 6137, 115, 8299, 8808, 5493, 9722, 6248, 3538, 6412, - 1389, 6, 8260, 1885, 842, 5573, 2698, 3090, 2800, - 2921, 3739, 2293, 2710, 1611, 3158, 2070, 8406, 3015, - 6726, 7435, 4345, 9136, 5984, 1060, 9009, 9251, 72, - 5505, 7237, 8787, 975, 9505, 908, 822, 4448, 7693, - 6356, 2653, 5621, 8148, 9198, 7465, 6973, 3587, 2130, - 897, 1607, 7456, 2192, 7464, 6519, 4634, 5937, 6298, - 1791, 8423, 3982, 4246, 9101, 2582, 2031, 2228, 6965, - 7107, 2366, 8535, 2639, 8466, 4098, 459, 150, 5856, - 1912, 964, 8433, 1624, 8808, 7023, 7864, 7521, 815, - 6922, 1220, 3036, 1328, 3530, 4761, 8404, 256, 9072, - 1115, 3943, 7302, 3124, 1654, 8578, 3009, 6915, 9182, - 4380, 9821, 5307, 2554, 5291, 144, 8487, 8360, 6441, - 9740, 1644, 3437, 6837, 2455, 2078, 1936, 8420, 1991, - 5409, 3771, 4869, 6289, 4915, 6886, 877, 3586, 9557, - 8263, 6130, 9991, 5328, 4036, 5503, 4642, 9267, 8162, - 7742, 7316, 8655, 6294, 8521, 7154, 7219, 563, 5545, - 5616, 9021, 3586, 5561, 3290, 1598, 2860, 2783, 3983, - 6641, 4924, 8108, 2753, 1497, 7966, 6426, 9602, 8617, - 1605, 3867, 2988, 4313, 173, 7776, 7611, 4603, 7159, - 9296, 9220, 1927, 6714, 3154, 7543, 6055, 7089, 9494, - 5918, 4799, 903, 494, 2395, 3449, 1711, 6546, 4011, - 2947, 1612, 3708, 358, 6190, 3954, 3577, 8911, 9868, - 6251, 8055, 6089, 7963, 8840, 4185, 6755, 355, 6352, - 5311, 1059, 610, 7958, 4850, 2499, 9711, 3806, 6250, - 8375, 8505, 8212, 4529, 5903, 779, 6298, 843, 2580, - 2440, 8346, 8684, 1928, 735, 9671, 9759, 3161, 3871, - 916, 5753, 9604, 8843, 1124, 4123, 7198, 2671, 3627, - 8088, 1464, 8008, 6214, 5921, 5326, 1986, 5777, 4405, - 2420, 5995, 1834, 7139, 3926, 7935, 8098, 3310, 4314, - 4272, 4364, 8323, 1501, 8907, 3647, 5194, 9153, 5620, - 3776, 3885, 320, 4474, 3998, 9326, 2496, 7575, 3635, - 9821, 6912, 1430, 9274, 8332, 8750, 3666, 4886, 3514, - 5684, 7022, 7225, 4670, 6145, 9400, 6955, 669, 9314, - 5042, 9957, 522, 2088, 3120, 6051, 814, 2768, 7866, - 1760, 8040, 94, 6091, 5096, 7195, 1370, 6937, 3280, - 5126, 9403, 8489, 4972, 7860, 4371, 7683, 9949, 2216, - 7380, 3573, 6442, 6367, 9355, 6244, 3541, 6659, 7982, - 3718, 7635, 5639, 6623, 9943, 1105, 2414, 8374, 645, - 2997, 695, 3363, 2434, 1834, 1918, 2808, 118, 5131, - 2293, 5340, 7580, 6903, 2854, 2831, 1839, 3671, 4266, - 870, 7134, 8524, 2828, 6020, 1177, 3862, 9275, 4728, - 3816, 1273, 8697, 4773, 2723, 595, 490, 5292, 2210, - 6928, 8693, 1355, 5024, 3482, 1518, 5400, 4099, 1127, - 1419, 8205, 5821, 9904, 9882, 273, 1299, 6646, 4904, - 4155, 5241, 7405, 1312, 4296, 9010, 6357, 6602, 6138, - 9034]), - values=tensor([ 1.3297e-01, 9.0849e-01, 1.8092e+00, 8.0206e-01, - -4.5551e-01, -7.6866e-01, 6.3766e-01, -3.0822e-01, - 1.0564e-02, 1.4327e+00, 3.2609e-01, 4.7462e-01, - 8.8278e-02, -1.9409e-01, -1.0782e+00, 6.6638e-01, - -1.0486e+00, -1.8225e-01, 6.1771e-01, -5.6818e-01, - -1.2272e+00, -5.7143e-02, 6.8652e-01, -1.1087e+00, - 1.2571e+00, 2.5477e-01, 5.4997e-03, -1.9981e+00, - 5.6593e-01, 4.1133e-01, 2.4267e+00, 4.1591e-01, - -3.7050e-01, 9.3574e-01, 3.3172e-01, 3.2410e-02, - 5.9685e-01, -1.1622e+00, 3.9498e-01, -1.0049e+00, - -9.0504e-01, -2.0467e-01, -8.5424e-01, 8.1236e-01, - 3.1720e-01, 9.8217e-01, 2.8819e-02, 1.6535e+00, - -1.0838e+00, 1.2877e-01, 4.1395e-01, -2.5200e+00, - -5.4347e-01, -2.5896e-01, 2.5096e-01, -5.3203e-01, - 1.9152e+00, -1.5833e-01, 1.4159e+00, -1.1105e+00, - 5.9711e-01, -5.6073e-02, 5.5415e-02, 7.8036e-02, - 6.2419e-01, 
9.5551e-02, 8.7368e-01, 9.3345e-01, - 5.2727e-01, 9.5391e-01, 2.5492e-01, -9.6762e-02, - 1.0802e+00, 1.3122e+00, -1.6240e+00, 2.3697e-01, - -5.4113e-02, 7.6388e-01, -6.4595e-01, -5.2535e-02, - 7.3346e-01, 5.0813e-01, -1.5740e+00, 3.3960e-01, - -2.9510e-01, 1.7841e+00, 4.8756e-01, -4.5259e-01, - -1.8444e+00, -2.1357e-01, 1.1029e-01, -1.7170e-01, - -5.2037e-01, -1.1596e+00, 2.6354e-01, -1.6632e+00, - 4.0846e-01, -1.1808e+00, -1.0450e+00, -1.4962e+00, - -1.6788e+00, -1.1995e-01, 5.6411e-01, 1.1903e+00, - 5.0231e-01, 2.1936e-01, -5.6346e-01, 1.5764e+00, - -1.2601e+00, -9.2481e-01, -4.7580e-01, -8.5261e-01, - -5.7967e-01, 1.7259e+00, -8.9352e-01, 8.9318e-01, - -1.3694e+00, 2.3923e-02, -1.0432e-01, -2.0033e+00, - 9.1463e-01, -9.0454e-01, -3.1114e-01, -7.4375e-01, - -4.3003e-01, 1.5056e+00, 7.8687e-03, -6.1210e-01, - 2.1620e+00, 1.7638e+00, 5.2404e-01, -9.3079e-01, - -1.1023e+00, -8.3680e-01, 6.4900e-02, 1.1635e+00, - -6.0633e-01, -1.5557e-01, 3.6461e-01, 8.2103e-01, - 1.0447e-01, -7.3858e-01, 3.3201e-01, -2.4104e-01, - -6.9673e-01, -2.8446e+00, 6.1967e-01, 1.1656e+00, - -7.1321e-01, -2.0136e+00, -1.0190e+00, -1.0390e+00, - -2.8866e-01, 6.4077e-01, 2.6263e-01, 4.2757e-02, - 1.5566e+00, 1.2000e+00, 3.6485e-01, 1.1036e-01, - 8.1248e-01, -2.1197e-02, -5.7540e-01, -6.0713e-01, - 6.3466e-01, 1.0460e+00, -1.1099e+00, -7.2814e-01, - 1.1725e+00, -4.0501e-01, 7.8776e-01, 9.8517e-01, - -1.4285e+00, -5.8503e-01, -9.1038e-01, -7.9574e-01, - -1.1846e+00, -5.8986e-01, 1.2349e+00, 8.0958e-02, - -3.1895e-01, 1.4251e-02, -5.6799e-01, 6.6427e-02, - 2.2719e-01, -6.0950e-01, -4.1164e-01, -2.4372e+00, - -1.4070e+00, 1.5584e+00, 5.2180e-01, 2.7513e-01, - -2.9970e-01, -1.8697e+00, 1.6909e+00, 7.4602e-01, - -7.0784e-02, 3.7053e-02, -5.2767e-01, -5.9305e-01, - 5.2620e-01, 2.8019e-01, -2.9377e-01, -4.7461e-01, - -1.5665e+00, 1.0476e+00, -1.0441e+00, 7.3423e-01, - 2.1548e-01, 7.3230e-01, 1.1592e+00, -1.8343e+00, - -9.8880e-01, 1.1552e+00, -2.6919e+00, 1.1854e+00, - 4.7003e-01, 4.4456e-01, 7.8471e-01, 1.5144e-01, - -8.5405e-01, -7.0411e-02, 1.3788e+00, 1.1369e+00, - 8.9437e-01, -1.6914e+00, -9.5504e-01, 6.4915e-01, - 3.9823e-01, -1.1050e-01, -5.4955e-01, 8.2386e-01, - -1.2410e+00, -4.0700e-01, -2.7036e-01, -2.9186e-01, - -8.5945e-01, 5.6768e-01, -1.4587e+00, 7.8769e-01, - 3.2238e-01, -1.5819e-01, 3.2982e-01, 1.4037e+00, - 8.8634e-01, -1.0269e+00, 1.0176e+00, -1.3746e+00, - -5.1208e-01, 6.4701e-01, -5.2470e-01, -1.7717e+00, - 1.8699e-01, -5.4338e-01, -4.1919e-01, -1.5675e+00, - -7.1253e-01, -5.7763e-03, -2.7710e-01, -9.4082e-01, - 2.7434e-03, 2.0787e-01, -1.8595e-01, 7.3730e-01, - 5.7293e-01, 1.9967e+00, -1.5934e-01, 4.3268e-01, - -1.4715e+00, 1.0482e-01, 3.4588e-01, 1.6261e-01, - -1.1423e+00, 4.5188e-01, -4.9737e-01, -7.2952e-01, - -1.7259e+00, -1.5784e+00, 1.4417e+00, 1.2186e+00, - 2.5223e-01, 1.7406e+00, 1.4400e-01, 3.7990e-01, - 7.5243e-01, 1.3841e+00, 1.1362e+00, 1.5712e+00, - -8.7206e-01, 1.1318e+00, -1.9833e-01, 5.9034e-01, - -1.7236e-01, -1.5032e+00, -2.8552e-01, -1.1716e-01, - -1.6424e-01, -1.1008e+00, 1.5305e-01, 6.9615e-01, - -5.8633e-01, 1.4421e-01, 1.2450e+00, 4.3681e-01, - 1.5092e-01, 2.6360e-01, -1.0523e+00, -6.2697e-01, - -3.6902e-01, -3.0877e-01, 6.4047e-01, 6.4782e-02, - 1.1856e+00, 4.2417e-01, 1.7686e+00, -4.5680e-01, - 2.0995e-01, -2.1633e+00, 1.1540e+00, -1.7442e+00, - 9.6339e-01, -1.6156e+00, 2.3688e-01, 2.5577e-01, - -8.9686e-01, 3.4455e-01, 8.1875e-01, -1.5136e-01, - 3.9369e-02, 7.0183e-01, 3.3034e-02, 8.2130e-01, - 7.0307e-01, -9.2346e-01, -4.5889e-01, -2.6156e-01, - -4.3084e-01, 8.8781e-01, 
9.9804e-01, 3.3388e-01, - 1.0757e-01, 5.9697e-01, 1.2427e+00, 3.2869e-01, - -1.5097e+00, -1.9083e-01, 2.8416e-01, -5.2638e-01, - 1.4738e+00, -1.5979e-01, 3.4783e-01, 3.6421e-01, - 4.4399e-01, 6.3267e-01, 1.3901e-01, -9.2403e-01, - 1.7771e+00, 6.5199e-02, 1.5269e+00, 1.6496e+00, - 7.2533e-02, -6.8676e-01, 1.3414e+00, 9.6359e-01, - 2.9568e+00, -8.5358e-01, -6.7427e-01, 1.3732e-01, - -1.0654e+00, 2.3110e-01, 7.2931e-01, 1.7194e+00, - -1.8936e+00, -1.5423e+00, -9.9066e-01, -2.6539e-01, - -7.8393e-01, -5.5044e-01, -3.5931e-01, 1.8318e-01, - 8.2474e-02, -1.9846e+00, 6.2423e-01, 7.7254e-01, - -9.8151e-01, 9.3887e-02, -5.3173e-01, -1.5621e+00, - -1.1312e+00, 1.3492e-01, 2.5026e+00, -7.9721e-01, - 3.1173e-03, -8.6647e-01, -7.1077e-01, 1.3134e+00, - -5.3493e-01, -1.1850e+00, -5.1230e-01, 1.9353e+00, - -4.5254e-01, -3.7063e-01, 1.6567e+00, 4.7380e-01, - -2.9907e-01, -2.4430e-01, 3.3897e-01, 1.0867e-01, - 2.3405e-01, 1.8532e+00, -2.5639e-01, 2.0512e+00, - -8.5014e-01, -8.0865e-01, 1.4545e+00, -2.5015e+00, - -9.3566e-01, -1.2448e-01, -2.2573e-01, -1.4660e+00, - 1.1722e+00, -1.1998e+00, 1.6788e+00, -1.0405e+00, - 1.2098e+00, -1.1788e+00, -4.9755e-01, 1.7964e+00, - -3.1077e-01, -1.4468e-01, 3.4519e-01, 1.6835e+00, - 3.4984e-01, -2.1993e-01, 1.3576e+00, -8.1823e-01, - 1.1165e+00, 9.5583e-01, -3.3059e-01, 1.0923e+00, - 8.5440e-01, -9.3519e-01, 4.0902e-01, 1.3268e+00, - -1.2174e+00, 1.4653e+00, 2.5019e-01, 4.5097e-03, - -7.5048e-01, 5.9750e-01, -1.1422e+00, -3.5960e-01, - 1.0927e+00, -3.1503e-01, -2.4308e+00, -2.2413e+00, - -1.4103e+00, -3.0158e-01, 1.6243e+00, 5.1640e-01, - -9.5662e-01, 1.0604e-01, -1.0351e+00, -1.1975e+00, - 6.2117e-01, 1.5310e+00, -4.3492e-01, -9.1871e-01, - 8.9716e-01, -9.1954e-01, -3.8931e-01, -7.7023e-01, - 3.6372e-01, 1.3470e+00, 1.4388e+00, 1.3091e+00, - 1.1106e+00, 1.5007e+00, 1.6660e+00, -4.8697e-01, - -9.4078e-01, 7.6510e-01, -6.8051e-01, -1.6474e+00, - 2.2544e-01, -1.8447e+00, -5.3103e-02, -1.2185e+00, - 6.9555e-01, 3.6630e+00, -6.4696e-01, -7.2782e-01, - -5.0910e-01, 4.9129e-01, 3.3216e-01, 1.0406e+00, - 5.5768e-01, 1.5851e+00, 2.1537e+00, -9.0319e-01, - -1.9001e+00, 6.7718e-01, -8.4279e-02, -1.4888e-01, - -4.7245e-01, -1.9197e+00, 7.3433e-01, -1.6878e+00, - 3.1570e-01, 2.1447e-01, 1.5316e-01, -1.3019e+00, - -6.3478e-01, -4.5551e-01, 8.4241e-01, -3.9209e-01, - 1.5644e+00, 4.9729e-01, 2.0649e-01, -2.5577e-01, - -1.4501e+00, 2.3429e+00, 8.7758e-01, 3.2614e-01, - -1.8522e+00, 1.8349e+00, 9.3899e-01, 1.1318e+00, - 5.3172e-01, 7.2352e-01, 1.4439e+00, 5.7875e-01, - -3.6690e-01, -1.1905e+00, -1.9647e+00, 7.6827e-01, - -2.3615e+00, 1.2002e+00, -3.5475e-01, -1.1161e-01, - -1.0536e+00, -3.8826e-01, -1.0184e-01, -5.5691e-01, - -7.4732e-01, -2.5919e+00, 9.1701e-01, -9.3255e-01, - -2.1214e-02, -3.6747e-01, 1.6556e+00, 2.6905e-01, - 5.1609e-01, 2.3354e+00, 5.9431e-02, -1.6283e+00, - 2.8759e-01, -1.3781e+00, 2.4710e+00, 6.1824e-01, - 2.2045e-01, 6.7652e-01, 1.3821e-02, 1.7212e-01, - 1.4158e+00, -8.1908e-01, 1.4151e+00, 1.4381e+00, - 4.5847e-01, -7.8703e-03, 7.8215e-01, 5.3806e-01, - -2.9901e-01, 3.9370e-01, 1.4362e+00, -2.2438e+00, - 3.0226e-01, -5.8754e-01, 1.5191e+00, -4.8316e-01, - -8.0640e-01, -1.6484e+00, -5.6154e-01, -1.5894e+00, - -1.3882e+00, -2.4549e-03, -1.5414e+00, -1.4992e+00, - -5.4732e-01, -1.0648e+00, 1.8636e-01, -1.1982e+00, - 5.1991e-03, -1.1696e+00, -6.8966e-01, -1.5766e-01, - 8.7436e-01, 3.2097e-01, -9.2122e-01, 1.1185e+00, - -8.4992e-01, -1.3039e+00, 1.9983e-01, -9.6774e-02, - 3.7448e-01, -7.9689e-01, 1.5311e+00, 1.0261e-01, - -1.0185e+00, 2.1525e+00, 7.8248e-01, 
-2.3518e-01, - 2.1233e-01, 1.4207e-02, 1.2566e-01, -1.9384e+00, - -1.2048e+00, 1.5981e+00, 6.9756e-01, -5.6596e-01, - 5.1634e-01, 9.3072e-01, 1.1388e+00, -9.9768e-01, - -9.4170e-01, 7.9742e-01, -2.9637e-01, 9.0352e-02, - 9.4303e-01, -7.1913e-03, 1.9184e-01, 9.1883e-01, - -9.7573e-01, 2.3804e-01, 7.4936e-02, -5.8641e-02, - -9.8420e-01, -5.0700e-02, 1.7817e-01, -1.5865e+00, - -2.4564e+00, -4.7479e-01, 1.2967e+00, 1.4371e-01, - 1.5005e-01, -7.8392e-01, -1.7234e-01, 8.9071e-01, - -2.4219e-01, -4.7119e-01, 1.4971e-01, -7.8751e-01, - 8.0110e-01, 6.2573e-01, -1.1733e+00, 1.6007e+00, - -3.8459e-01, -5.2098e-01, 2.1626e+00, 8.8267e-01, - 1.0954e+00, 5.7656e-01, -2.2267e+00, 1.4519e+00, - -1.5851e+00, -5.9257e-01, -4.5129e-01, 7.0686e-01, - 6.0109e-01, 1.6994e+00, 8.6925e-01, -1.5427e+00, - -7.9651e-01, 1.8621e-01, -3.1212e+00, 1.4901e-02, - 1.6922e-01, 1.2513e+00, -4.2448e-01, 1.0561e+00, - -8.6462e-01, 1.4342e+00, 4.1248e-01, 6.6247e-01, - -3.1983e-01, -1.8106e-02, -9.5803e-01, -1.4268e+00, - -1.8884e-01, 1.3878e-01, 1.6822e+00, -7.8309e-01, - -1.1472e+00, 1.8898e-01, -3.1205e-01, 2.8555e+00, - 2.6930e-03, 7.8696e-02, 1.1022e+00, -9.1744e-01, - -1.7009e+00, -1.5098e+00, 2.2910e-01, -8.8691e-01, - 7.5633e-02, -7.9249e-01, 1.5384e+00, -5.9327e-02, - 6.1279e-01, 4.3493e-01, -9.5282e-01, 6.0943e-01, - 6.5050e-02, -2.4542e-01, 1.2162e+00, -8.0156e-01, - -6.9050e-01, -1.6580e+00, 4.4322e-01, 8.2145e-01, - -1.0268e-01, 5.5264e-01, -1.9286e-01, -5.5476e-01, - -7.8615e-01, -6.5843e-01, 7.1259e-01, 5.6129e-01, - -8.3640e-01, 1.0283e+00, 4.8384e-01, 6.8375e-01, - 5.0181e-01, -3.5927e-01, -1.2558e+00, 5.2600e-02, - -4.8078e-01, -1.0970e+00, -5.5662e-01, -1.5449e-01, - -3.8639e-01, 5.6022e-01, -1.0058e-01, 6.0612e-01, - 2.7150e-01, 1.0561e+00, -2.6207e-01, -1.5548e+00, - 7.8230e-01, 1.1712e+00, -5.6296e-03, -2.0520e-01, - 5.9818e-01, -8.2466e-01, 1.9089e-01, 2.7547e-01, - 1.7971e+00, 1.2536e-01, 5.2932e-01, 4.9101e-01, - 6.0462e-01, -2.4329e-01, 7.5199e-01, -6.8705e-01, - 3.6761e-01, 3.6483e-02, 1.4891e-02, -1.9413e+00, - 2.9578e-01, 9.9359e-01, 1.4413e+00, -2.1064e+00, - 9.3117e-01, -1.0072e-01, 2.6773e+00, 2.2882e-01, - -4.1157e-01, 1.0738e+00, -6.8263e-01, 1.2002e+00, - 3.1006e+00, -8.7404e-01, 9.0200e-01, 2.0398e+00, - -5.7111e-01, 5.4812e-01, -7.6108e-01, -5.2308e-01, - 1.4593e+00, 2.6750e-02, 9.7504e-02, 7.8603e-01, - 3.9986e-01, 7.9704e-01, 3.3566e-01, -2.0831e+00, - -4.2611e-01, -6.2602e-01, -5.6343e-01, 5.3650e-01, - -2.8015e-01, -1.4510e+00, -1.2542e+00, 1.3278e+00, - -1.8985e+00, 1.6613e+00, 5.1065e-01, 1.4230e-01, - 1.6969e-01, -3.3262e-01, 1.2468e-01, -8.9060e-01, - -1.6869e+00, -9.5599e-01, 2.1929e-01, -1.9725e+00, - 7.8296e-01, -2.2028e+00, 4.3836e-01, 1.4662e+00, - 3.9112e-01, -9.9149e-01, -1.9531e-01, -7.1714e-01, - -7.8987e-01, 1.1565e+00, 9.0068e-01, -1.5761e+00, - -1.3562e+00, -1.3920e+00, 1.2390e+00, -1.7055e+00, - 1.4268e-01, 1.7303e+00, 7.5134e-01, -4.8038e-01, - 3.8487e-02, 2.1008e+00, 1.7315e+00, 1.0098e+00, - 1.4817e+00, -2.1972e-01, 9.3268e-01, 4.9045e-01, - -4.0701e-01, 1.1468e+00, 2.0466e+00, -3.1001e-01, - -1.8649e+00, 3.1769e-01, 4.9814e-01, -5.3245e-01, - 1.1688e+00, -1.5140e+00, -1.1835e+00, 4.0259e-01, - -1.1849e+00, -2.0930e-01, -4.2964e-01, -1.7172e-01, - -1.5387e+00, 8.8588e-01, -4.9526e-01, -1.5400e+00, - 6.3536e-01, 2.6739e-01, -1.0050e+00, 1.0043e+00, - -5.8422e-01, -8.4750e-01, -1.0945e+00, 2.8788e-01, - -1.1867e+00, 8.4570e-01, -1.2528e+00, 7.0623e-01, - 2.0538e+00, 3.6141e-01, -6.0493e-01, -9.8517e-01, - -2.3043e+00, 9.0285e-01, -3.4727e-01, 5.0794e-01, - 2.5091e-01, 
-3.6726e-01, 9.3557e-01, -5.2361e-01, - -1.8882e+00, 4.5424e-01, 6.7875e-01, 1.2379e+00, - -1.5805e+00, -7.8262e-01, -1.1040e+00, 1.0546e+00, - 2.7874e-01, -1.5022e-01, 1.9719e-01, -1.7536e+00, - -7.7400e-01, 1.5210e-02, 7.6396e-01, -7.5197e-01, - -3.8214e-01, -1.5777e-01, 1.3004e+00, -4.5149e-01, - -3.1946e-01, 5.1256e-01, -1.2595e+00, 1.2095e+00, - -9.0147e-02, 2.6269e+00, -1.1671e+00, 9.5037e-01, - -1.6088e-01, 1.1225e+00, 7.9420e-01, -8.1975e-01, - 5.6791e-01, -1.2193e+00, 1.8191e+00, -1.3930e+00, - -1.1907e+00, -2.6001e-01, -4.4940e-01, -1.6400e-01, - -4.0858e-01, 1.3220e+00, 1.4310e-01, -5.9821e-01, - -2.0409e+00, 5.4306e-01, 3.2210e-02, 7.7063e-01, - 4.1895e-02, -1.1647e+00, -9.4491e-01, -5.1099e-01, - -2.6572e+00, -2.3200e-01, 7.1144e-01, -8.8903e-01, - -6.9510e-01, 4.7389e-01, -5.7160e-01, 7.8597e-01, - 1.5643e+00, -6.0747e-01, -5.2934e-01, 1.8096e+00, - -3.8622e-01, 6.7489e-01, -1.2660e+00, -8.0505e-01, - 7.8886e-01, 8.5037e-01, 1.7826e+00, -1.7251e+00, - -3.3299e-02, -7.9316e-01, -1.0186e+00, -7.3455e-01, - -1.3291e+00, 2.3412e-02, -1.9621e-01, -1.5976e-01, - -1.0141e+00, 1.2968e+00, 3.2055e-02, -1.2980e+00, - -4.8181e-01, 3.2760e-01, -3.5702e-01, 3.2413e-01, - 5.3971e-01, 7.0470e-02, -2.6942e-01, 1.1506e+00, - 1.3213e+00, 4.8988e-01, 1.5174e+00, 8.0358e-01, - -1.1400e+00, 1.8778e+00, 4.5567e-01, -1.2671e+00, - 1.3682e+00, -1.1556e+00, -2.3813e-01, 5.9625e-01, - -3.9434e-01, 9.9466e-01, -1.1281e+00, -1.1223e+00, - -1.4276e-01, -2.6829e+00, 5.2359e-02, 4.0542e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4959, 0.2009, 0.6905, ..., 0.0310, 0.9833, 0.5457]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.568303346633911 seconds - -[19.38, 18.8, 18.75, 18.66, 19.13, 18.64, 20.06, 18.86, 19.25, 18.85] -[53.27] -10.638694286346436 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 351951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.568303346633911, 'TIME_S_1KI': 0.030027769054879545, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 566.7232446336747, 'W': 53.27} -[19.38, 18.8, 18.75, 18.66, 19.13, 18.64, 20.06, 18.86, 19.25, 18.85, 44.2, 44.08, 45.11, 42.45, 43.72, 43.71, 43.65, 47.54, 51.43, 47.42] -578.7650000000001 -28.938250000000004 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 351951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.568303346633911, 'TIME_S_1KI': 0.030027769054879545, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 566.7232446336747, 'W': 53.27, 'J_1KI': 1.6102333695135818, 'W_1KI': 0.15135629675721907, 'W_D': 24.33175, 'J_D': 258.8580497018099, 'W_D_1KI': 0.06913391352773539, 'J_D_1KI': 0.00019643050745056948} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_2e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_2e-05.json deleted file mode 100644 index 6556931..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 280255, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, 
"MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.594846248626709, "TIME_S_1KI": 0.03780430767917328, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 545.9777628755569, "W": 52.21999999999999, "J_1KI": 1.9481463769622556, "W_1KI": 0.1863303063281654, "W_D": 34.63874999999999, "J_D": 362.15984744936213, "W_D_1KI": 0.12359725963854341, "J_D_1KI": 0.00044101714381025644} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_2e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_2e-05.output deleted file mode 100644 index 368e39a..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_2e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 0.053742408752441406} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2000, 2000, 2000]), - col_indices=tensor([9252, 7819, 5075, ..., 9209, 658, 875]), - values=tensor([ 0.9710, -0.5183, -1.5427, ..., -1.0993, -0.0500, - -0.6429]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.1865, 0.1689, 0.2232, ..., 0.7118, 0.8977, 0.2307]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 0.053742408752441406 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '195376', '-ss', '10000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 7.3199145793914795} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1999, 1999, 2000]), - col_indices=tensor([3460, 2789, 4161, ..., 2850, 3060, 3030]), - values=tensor([-0.4563, -0.3472, -0.6924, ..., -0.5655, 1.0332, - 0.6329]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.8895, 0.8400, 0.6234, ..., 0.1689, 0.7333, 0.3658]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 7.3199145793914795 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '280255', '-ss', '10000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.594846248626709} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2000, 2000, 2000]), - col_indices=tensor([4690, 7516, 8650, ..., 5389, 9584, 7760]), - values=tensor([-0.2005, -1.0519, -1.2283, ..., -0.8584, -2.8088, - -1.7372]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.9179, 0.8134, 0.2922, ..., 0.0892, 0.5448, 0.8268]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.594846248626709 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2000, 2000, 2000]), - col_indices=tensor([4690, 7516, 8650, ..., 5389, 9584, 7760]), - values=tensor([-0.2005, -1.0519, -1.2283, ..., -0.8584, -2.8088, - -1.7372]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.9179, 0.8134, 0.2922, ..., 0.0892, 0.5448, 0.8268]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.594846248626709 seconds - -[19.32, 18.7, 18.97, 18.65, 18.76, 22.2, 21.46, 19.2, 19.32, 18.6] -[52.22] -10.4553382396698 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 280255, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 2000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.594846248626709, 'TIME_S_1KI': 0.03780430767917328, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 545.9777628755569, 'W': 52.21999999999999} -[19.32, 18.7, 18.97, 18.65, 18.76, 22.2, 21.46, 19.2, 19.32, 18.6, 19.57, 22.53, 19.54, 19.13, 19.69, 18.86, 18.93, 18.68, 18.88, 18.76] -351.625 -17.58125 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 280255, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 2000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.594846248626709, 'TIME_S_1KI': 0.03780430767917328, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 545.9777628755569, 'W': 52.21999999999999, 'J_1KI': 1.9481463769622556, 'W_1KI': 0.1863303063281654, 'W_D': 34.63874999999999, 'J_D': 362.15984744936213, 'W_D_1KI': 0.12359725963854341, 'J_D_1KI': 0.00044101714381025644} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_5e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_5e-05.json deleted file mode 100644 index a4356fe..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 161829, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.560773134231567, "TIME_S_1KI": 0.06525884195188482, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 536.9792062926293, "W": 52.09000000000001, "J_1KI": 3.3181889914207545, "W_1KI": 0.3218829752392958, "W_D": 35.09000000000001, "J_D": 361.7316250491143, "W_D_1KI": 0.21683381841326343, "J_D_1KI": 0.0013398946938636674} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_5e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_5e-05.output deleted file mode 100644 index f8d2458..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_5e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.07913708686828613} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([8676, 2759, 3518, ..., 794, 1460, 8146]), - values=tensor([-1.3181, 0.5129, 1.2356, ..., 1.2374, -0.2237, - -1.3116]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.0994, 0.0228, 0.8362, ..., 0.1979, 0.7363, 0.5142]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 0.07913708686828613 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '132681', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.608779668807983} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 4998, 5000, 5000]), - col_indices=tensor([5839, 704, 8053, ..., 7219, 4255, 4840]), - values=tensor([-1.0670, 2.1140, -0.3362, ..., 0.4929, -0.5106, - 0.1854]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.8590, 0.9402, 0.5844, ..., 0.3396, 0.2772, 0.5330]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 8.608779668807983 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '161829', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.560773134231567} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 4999, 5000]), - col_indices=tensor([9575, 4353, 8375, ..., 9468, 9611, 5433]), - values=tensor([ 0.4049, 0.0939, -0.9771, ..., -1.7530, 0.0441, - -0.8668]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.9157, 0.3468, 0.2259, ..., 0.5889, 0.7150, 0.1067]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.560773134231567 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 4999, 5000]), - col_indices=tensor([9575, 4353, 8375, ..., 9468, 9611, 5433]), - values=tensor([ 0.4049, 0.0939, -0.9771, ..., -1.7530, 0.0441, - -0.8668]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.9157, 0.3468, 0.2259, ..., 0.5889, 0.7150, 0.1067]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.560773134231567 seconds - -[19.12, 19.43, 18.76, 18.62, 18.77, 18.87, 19.13, 18.74, 19.13, 18.93] -[52.09] -10.30868124961853 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 161829, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.560773134231567, 'TIME_S_1KI': 0.06525884195188482, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 536.9792062926293, 'W': 52.09000000000001} -[19.12, 19.43, 18.76, 18.62, 18.77, 18.87, 19.13, 18.74, 19.13, 18.93, 19.06, 18.76, 18.76, 18.73, 18.91, 18.76, 18.7, 19.31, 18.8, 18.53] -340.0 -17.0 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 161829, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.560773134231567, 'TIME_S_1KI': 0.06525884195188482, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 536.9792062926293, 'W': 52.09000000000001, 'J_1KI': 3.3181889914207545, 'W_1KI': 0.3218829752392958, 'W_D': 35.09000000000001, 'J_D': 361.7316250491143, 'W_D_1KI': 0.21683381841326343, 'J_D_1KI': 0.0013398946938636674} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_8e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_8e-05.json deleted file mode 100644 index 6a32a6b..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 132317, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 10.837324380874634, "TIME_S_1KI": 0.08190424798683943, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 564.7549686312675, "W": 52.23, "J_1KI": 4.268196593266682, "W_1KI": 0.39473385883900025, "W_D": 35.28375, "J_D": 381.51776994913814, "W_D_1KI": 0.266660746540505, "J_D_1KI": 
0.002015317355596824} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_8e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_8e-05.output deleted file mode 100644 index 74a98c4..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_10000_8e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 0.09673595428466797} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 7997, 7998, 8000]), - col_indices=tensor([3032, 3684, 2824, ..., 2897, 2141, 5706]), - values=tensor([ 0.0921, -0.5370, -1.8592, ..., -1.5071, -0.5268, - 0.8858]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.9804, 0.0065, 0.0971, ..., 0.8398, 0.8724, 0.4418]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 0.09673595428466797 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '108542', '-ss', '10000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 7999, "MATRIX_DENSITY": 7.999e-05, "TIME_S": 8.613329887390137} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 7997, 7997, 7999]), - col_indices=tensor([ 761, 4005, 1793, ..., 9189, 4269, 7090]), - values=tensor([-0.6525, 0.4976, 1.8459, ..., -1.0738, 1.0846, - -0.0873]), size=(10000, 10000), nnz=7999, - layout=torch.sparse_csr) -tensor([0.5623, 0.7145, 0.2637, ..., 0.7525, 0.4467, 0.1719]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 7999 -Density: 7.999e-05 -Time: 8.613329887390137 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '132317', '-ss', '10000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 10.837324380874634} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 7999, 7999, 8000]), - col_indices=tensor([9710, 1094, 4050, ..., 8648, 5364, 1815]), - values=tensor([-1.2907, 0.8244, 0.9563, ..., -0.1319, -1.0579, - 1.0542]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.9175, 0.2457, 0.9531, ..., 0.0123, 0.1169, 0.3308]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 10.837324380874634 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 7999, 7999, 8000]), - col_indices=tensor([9710, 1094, 4050, ..., 8648, 5364, 1815]), - values=tensor([-1.2907, 0.8244, 0.9563, ..., -0.1319, -1.0579, - 1.0542]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.9175, 0.2457, 0.9531, ..., 0.0123, 0.1169, 0.3308]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 10.837324380874634 seconds - -[19.16, 18.94, 18.97, 18.74, 18.85, 18.92, 18.66, 18.57, 18.69, 18.72] -[52.23] -10.812846422195435 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 132317, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 8e-05, 'TIME_S': 10.837324380874634, 'TIME_S_1KI': 0.08190424798683943, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 564.7549686312675, 'W': 52.23} -[19.16, 18.94, 18.97, 18.74, 18.85, 18.92, 18.66, 18.57, 18.69, 18.72, 19.16, 18.54, 18.88, 18.99, 18.8, 18.71, 18.93, 18.82, 19.04, 18.71] -338.92500000000007 -16.946250000000003 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 132317, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 8e-05, 'TIME_S': 10.837324380874634, 'TIME_S_1KI': 0.08190424798683943, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 564.7549686312675, 'W': 52.23, 'J_1KI': 4.268196593266682, 'W_1KI': 0.39473385883900025, 'W_D': 35.28375, 'J_D': 381.51776994913814, 'W_D_1KI': 0.266660746540505, 'J_D_1KI': 0.002015317355596824} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_0.0001.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_0.0001.json deleted file mode 100644 index 1b50ccf..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1927, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249879, "MATRIX_DENSITY": 9.999462222222222e-05, "TIME_S": 10.442464351654053, "TIME_S_1KI": 5.419026648497173, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 563.4650244927407, "W": 54.09, "J_1KI": 292.40530591216435, "W_1KI": 28.069538142189934, "W_D": 37.18625, "J_D": 387.37569360405206, "W_D_1KI": 19.297483134405812, "J_D_1KI": 10.014262135135347} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_0.0001.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_0.0001.output deleted file mode 100644 index bd889f0..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_0.0001.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249874, "MATRIX_DENSITY": 9.99944e-05, "TIME_S": 5.447489976882935} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 34, ..., 2249840, - 2249861, 2249874]), - col_indices=tensor([ 2973, 4578, 13554, ..., 128086, 130816, - 139639]), - values=tensor([-1.4482, -0.8746, 0.6251, ..., 0.9397, -0.7475, - 1.1848]), size=(150000, 150000), nnz=2249874, - layout=torch.sparse_csr) -tensor([0.0430, 0.9101, 0.4805, ..., 0.2541, 0.2396, 0.1770]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249874 -Density: 9.99944e-05 -Time: 5.447489976882935 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1927', '-ss', '150000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249879, "MATRIX_DENSITY": 9.999462222222222e-05, "TIME_S": 10.442464351654053} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 28, ..., 2249848, - 2249867, 2249879]), - col_indices=tensor([ 2827, 9854, 18696, ..., 124111, 136418, - 145737]), - values=tensor([-0.8019, -0.4460, -0.3896, ..., 0.2285, 0.8154, - -0.8082]), size=(150000, 150000), nnz=2249879, - layout=torch.sparse_csr) -tensor([0.8710, 0.8555, 0.2006, ..., 0.2667, 0.3349, 0.0757]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249879 -Density: 9.999462222222222e-05 -Time: 10.442464351654053 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 28, ..., 2249848, - 2249867, 2249879]), - col_indices=tensor([ 2827, 9854, 18696, ..., 124111, 136418, - 145737]), - values=tensor([-0.8019, -0.4460, -0.3896, ..., 0.2285, 0.8154, - -0.8082]), size=(150000, 150000), nnz=2249879, - layout=torch.sparse_csr) -tensor([0.8710, 0.8555, 0.2006, ..., 0.2667, 0.3349, 0.0757]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249879 -Density: 9.999462222222222e-05 -Time: 10.442464351654053 seconds - -[19.03, 18.56, 18.62, 18.95, 18.66, 18.9, 18.85, 18.96, 18.79, 19.09] -[54.09] -10.417175531387329 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1927, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 2249879, 'MATRIX_DENSITY': 9.999462222222222e-05, 'TIME_S': 10.442464351654053, 'TIME_S_1KI': 5.419026648497173, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 563.4650244927407, 'W': 54.09} -[19.03, 18.56, 18.62, 18.95, 18.66, 18.9, 18.85, 18.96, 18.79, 19.09, 19.21, 18.62, 18.75, 18.55, 18.93, 18.63, 18.66, 18.85, 18.82, 18.62] -338.07500000000005 -16.903750000000002 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1927, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 2249879, 'MATRIX_DENSITY': 9.999462222222222e-05, 'TIME_S': 10.442464351654053, 'TIME_S_1KI': 5.419026648497173, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 563.4650244927407, 'W': 54.09, 'J_1KI': 292.40530591216435, 'W_1KI': 28.069538142189934, 'W_D': 37.18625, 'J_D': 387.37569360405206, 'W_D_1KI': 19.297483134405812, 'J_D_1KI': 10.014262135135347} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_1e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_1e-05.json deleted file mode 100644 index 72d4bad..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 6415, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224999, "MATRIX_DENSITY": 9.999955555555555e-06, "TIME_S": 10.435844421386719, "TIME_S_1KI": 1.6267879066853808, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 555.0937748908996, "W": 53.15999999999999, "J_1KI": 86.53059624176143, "W_1KI": 8.286827747466873, "W_D": 36.19574999999999, "J_D": 377.95401622474185, "W_D_1KI": 5.6423616523772395, "J_D_1KI": 0.8795575451874107} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_1e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_1e-05.output deleted file mode 100644 index aae7c21..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_1e-05.output +++ /dev/null @@ -1,69 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 
22500000000, "MATRIX_NNZ": 225000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.6365699768066406} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 224998, 224999, - 225000]), - col_indices=tensor([ 7043, 104026, 137793, ..., 126195, 110874, - 142470]), - values=tensor([-0.2857, -0.3146, -0.0976, ..., -0.2572, -0.3788, - 0.4438]), size=(150000, 150000), nnz=225000, - layout=torch.sparse_csr) -tensor([0.2985, 0.8583, 0.6653, ..., 0.0402, 0.7336, 0.1879]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 225000 -Density: 1e-05 -Time: 1.6365699768066406 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '6415', '-ss', '150000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224999, "MATRIX_DENSITY": 9.999955555555555e-06, "TIME_S": 10.435844421386719} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 224997, 224998, - 224999]), - col_indices=tensor([ 5129, 83229, 97861, ..., 13919, 61541, 80521]), - values=tensor([ 0.0876, 0.8725, 1.4750, ..., -0.4659, -0.7167, - -0.7896]), size=(150000, 150000), nnz=224999, - layout=torch.sparse_csr) -tensor([0.7642, 0.9728, 0.3141, ..., 0.5132, 0.3950, 0.9962]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224999 -Density: 9.999955555555555e-06 -Time: 10.435844421386719 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 224997, 224998, - 224999]), - col_indices=tensor([ 5129, 83229, 97861, ..., 13919, 61541, 80521]), - values=tensor([ 0.0876, 0.8725, 1.4750, ..., -0.4659, -0.7167, - -0.7896]), size=(150000, 150000), nnz=224999, - layout=torch.sparse_csr) -tensor([0.7642, 0.9728, 0.3141, ..., 0.5132, 0.3950, 0.9962]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224999 -Density: 9.999955555555555e-06 -Time: 10.435844421386719 seconds - -[18.96, 18.62, 18.93, 18.74, 18.97, 18.61, 19.25, 18.64, 18.77, 18.57] -[53.16] -10.441944599151611 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 6415, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 224999, 'MATRIX_DENSITY': 9.999955555555555e-06, 'TIME_S': 10.435844421386719, 'TIME_S_1KI': 1.6267879066853808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.0937748908996, 'W': 53.15999999999999} -[18.96, 18.62, 18.93, 18.74, 18.97, 18.61, 19.25, 18.64, 18.77, 18.57, 19.44, 18.74, 18.98, 18.57, 19.2, 18.74, 18.84, 18.58, 19.26, 18.72] -339.285 -16.96425 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 6415, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 224999, 'MATRIX_DENSITY': 9.999955555555555e-06, 'TIME_S': 10.435844421386719, 'TIME_S_1KI': 1.6267879066853808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.0937748908996, 'W': 53.15999999999999, 'J_1KI': 86.53059624176143, 'W_1KI': 8.286827747466873, 'W_D': 36.19574999999999, 'J_D': 377.95401622474185, 'W_D_1KI': 5.6423616523772395, 'J_D_1KI': 0.8795575451874107} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_2e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_2e-05.json deleted file mode 100644 index b7c8705..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4590, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449998, "MATRIX_DENSITY": 1.999991111111111e-05, "TIME_S": 10.42506718635559, "TIME_S_1KI": 2.271256467615597, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 555.3637769150735, "W": 53.27, "J_1KI": 120.99428690960207, "W_1KI": 11.60566448801743, "W_D": 35.84, "J_D": 373.6481652832032, "W_D_1KI": 7.808278867102397, "J_D_1KI": 1.7011500799787356} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_2e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_2e-05.output deleted file mode 100644 index 3e9ad81..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_2e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449997, 
"MATRIX_DENSITY": 1.9999866666666668e-05, "TIME_S": 2.2872610092163086} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 449989, 449991, - 449997]), - col_indices=tensor([ 68012, 99634, 100782, ..., 36458, 75446, - 131988]), - values=tensor([ 1.6977, -0.4721, 0.3612, ..., 0.8187, 1.9383, - 1.0229]), size=(150000, 150000), nnz=449997, - layout=torch.sparse_csr) -tensor([0.3774, 0.5088, 0.0022, ..., 0.9247, 0.4282, 0.3501]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449997 -Density: 1.9999866666666668e-05 -Time: 2.2872610092163086 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4590', '-ss', '150000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449998, "MATRIX_DENSITY": 1.999991111111111e-05, "TIME_S": 10.42506718635559} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 449995, 449996, - 449998]), - col_indices=tensor([ 61894, 5419, 13967, ..., 148377, 77076, - 80555]), - values=tensor([-0.3818, -1.1777, 1.1611, ..., 0.2191, -0.4555, - -1.4335]), size=(150000, 150000), nnz=449998, - layout=torch.sparse_csr) -tensor([0.5703, 0.0992, 0.3579, ..., 0.5465, 0.1996, 0.4194]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449998 -Density: 1.999991111111111e-05 -Time: 10.42506718635559 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 449995, 449996, - 449998]), - col_indices=tensor([ 61894, 5419, 13967, ..., 148377, 77076, - 80555]), - values=tensor([-0.3818, -1.1777, 1.1611, ..., 0.2191, -0.4555, - -1.4335]), size=(150000, 150000), nnz=449998, - layout=torch.sparse_csr) -tensor([0.5703, 0.0992, 0.3579, ..., 0.5465, 0.1996, 0.4194]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449998 -Density: 1.999991111111111e-05 -Time: 10.42506718635559 seconds - -[19.02, 18.62, 18.76, 18.6, 18.88, 18.57, 23.82, 19.2, 19.3, 19.37] -[53.27] -10.425451040267944 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4590, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 449998, 'MATRIX_DENSITY': 1.999991111111111e-05, 'TIME_S': 10.42506718635559, 'TIME_S_1KI': 2.271256467615597, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.3637769150735, 'W': 53.27} -[19.02, 18.62, 18.76, 18.6, 18.88, 18.57, 23.82, 19.2, 19.3, 19.37, 19.06, 18.77, 22.83, 18.95, 18.86, 19.42, 18.64, 18.5, 18.78, 18.75] -348.59999999999997 -17.43 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4590, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 449998, 'MATRIX_DENSITY': 1.999991111111111e-05, 'TIME_S': 10.42506718635559, 'TIME_S_1KI': 2.271256467615597, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.3637769150735, 'W': 53.27, 'J_1KI': 120.99428690960207, 'W_1KI': 11.60566448801743, 'W_D': 35.84, 'J_D': 373.6481652832032, 'W_D_1KI': 7.808278867102397, 'J_D_1KI': 1.7011500799787356} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_5e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_5e-05.json deleted file mode 100644 index 3dc341a..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3324, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124970, "MATRIX_DENSITY": 4.9998666666666666e-05, "TIME_S": 10.53529667854309, "TIME_S_1KI": 3.1694635013667543, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 565.4297393774987, "W": 54.09, "J_1KI": 170.10521641922344, "W_1KI": 16.272563176895307, "W_D": 37.15325, "J_D": 388.38144693154095, "W_D_1KI": 11.177271359807461, "J_D_1KI": 3.3625966786424373} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_5e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_5e-05.output deleted file mode 100644 index 932c58d..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_5e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124973, "MATRIX_DENSITY": 4.99988e-05, 
"TIME_S": 3.1586453914642334} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 16, ..., 1124959, - 1124966, 1124973]), - col_indices=tensor([ 38899, 39848, 52046, ..., 89347, 109328, - 119838]), - values=tensor([-1.3534, -0.1003, 1.4354, ..., 0.0554, -0.6004, - -0.4270]), size=(150000, 150000), nnz=1124973, - layout=torch.sparse_csr) -tensor([0.3055, 0.9803, 0.7057, ..., 0.9429, 0.4751, 0.6952]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124973 -Density: 4.99988e-05 -Time: 3.1586453914642334 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3324', '-ss', '150000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124970, "MATRIX_DENSITY": 4.9998666666666666e-05, "TIME_S": 10.53529667854309} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 1124949, - 1124958, 1124970]), - col_indices=tensor([ 1720, 28668, 35808, ..., 118486, 131090, - 142254]), - values=tensor([ 1.2424, -0.4036, 0.1101, ..., -1.2125, 0.5296, - 0.2263]), size=(150000, 150000), nnz=1124970, - layout=torch.sparse_csr) -tensor([0.4456, 0.1409, 0.7627, ..., 0.2424, 0.9406, 0.1856]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124970 -Density: 4.9998666666666666e-05 -Time: 10.53529667854309 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 1124949, - 1124958, 1124970]), - col_indices=tensor([ 1720, 28668, 35808, ..., 118486, 131090, - 142254]), - values=tensor([ 1.2424, -0.4036, 0.1101, ..., -1.2125, 0.5296, - 0.2263]), size=(150000, 150000), nnz=1124970, - layout=torch.sparse_csr) -tensor([0.4456, 0.1409, 0.7627, ..., 0.2424, 0.9406, 0.1856]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124970 -Density: 4.9998666666666666e-05 -Time: 10.53529667854309 seconds - -[18.92, 18.71, 18.99, 18.89, 18.76, 18.71, 19.05, 18.7, 18.62, 18.6] -[54.09] -10.453498601913452 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3324, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1124970, 'MATRIX_DENSITY': 4.9998666666666666e-05, 'TIME_S': 10.53529667854309, 'TIME_S_1KI': 3.1694635013667543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 565.4297393774987, 'W': 54.09} -[18.92, 18.71, 18.99, 18.89, 18.76, 18.71, 19.05, 18.7, 18.62, 18.6, 19.06, 18.47, 18.79, 18.94, 18.78, 18.71, 18.66, 19.02, 18.85, 19.59] -338.735 -16.93675 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3324, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1124970, 'MATRIX_DENSITY': 4.9998666666666666e-05, 'TIME_S': 10.53529667854309, 'TIME_S_1KI': 3.1694635013667543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 565.4297393774987, 'W': 54.09, 'J_1KI': 170.10521641922344, 'W_1KI': 16.272563176895307, 'W_D': 37.15325, 'J_D': 388.38144693154095, 'W_D_1KI': 11.177271359807461, 'J_D_1KI': 3.3625966786424373} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_8e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_8e-05.json deleted file mode 100644 index 8ba1bd4..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2276, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799918, "MATRIX_DENSITY": 7.999635555555555e-05, "TIME_S": 10.411118507385254, "TIME_S_1KI": 4.574305143842379, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 558.852141866684, "W": 53.49, "J_1KI": 245.54136285882421, "W_1KI": 23.501757469244286, "W_D": 36.0805, "J_D": 376.9613891310692, "W_D_1KI": 15.852592267135327, "J_D_1KI": 6.9651108379329205} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_8e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_8e-05.output deleted file mode 100644 index e5b521d..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_150000_8e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '150000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799928, "MATRIX_DENSITY": 
7.99968e-05, "TIME_S": 4.612187147140503} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 23, ..., 1799912, - 1799921, 1799928]), - col_indices=tensor([ 11532, 21015, 31782, ..., 71255, 84604, - 133290]), - values=tensor([-1.7079, -1.0209, -0.3816, ..., 0.7888, 0.8995, - 0.6754]), size=(150000, 150000), nnz=1799928, - layout=torch.sparse_csr) -tensor([0.0240, 0.0647, 0.1004, ..., 0.4777, 0.4601, 0.4491]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799928 -Density: 7.99968e-05 -Time: 4.612187147140503 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2276', '-ss', '150000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799918, "MATRIX_DENSITY": 7.999635555555555e-05, "TIME_S": 10.411118507385254} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 31, ..., 1799893, - 1799909, 1799918]), - col_indices=tensor([ 4391, 23820, 27554, ..., 126002, 132896, - 137531]), - values=tensor([ 1.5239, 0.8745, -0.1510, ..., -0.5622, 2.0478, - 0.6295]), size=(150000, 150000), nnz=1799918, - layout=torch.sparse_csr) -tensor([0.1338, 0.2741, 0.5318, ..., 0.4585, 0.1492, 0.9447]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799918 -Density: 7.999635555555555e-05 -Time: 10.411118507385254 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 31, ..., 1799893, - 1799909, 1799918]), - col_indices=tensor([ 4391, 23820, 27554, ..., 126002, 132896, - 137531]), - values=tensor([ 1.5239, 0.8745, -0.1510, ..., -0.5622, 2.0478, - 0.6295]), size=(150000, 150000), nnz=1799918, - layout=torch.sparse_csr) -tensor([0.1338, 0.2741, 0.5318, ..., 0.4585, 0.1492, 0.9447]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799918 -Density: 7.999635555555555e-05 -Time: 10.411118507385254 seconds - -[19.54, 18.56, 18.75, 18.64, 18.6, 18.88, 22.46, 19.31, 19.16, 19.27] -[53.49] -10.447787284851074 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1799918, 'MATRIX_DENSITY': 7.999635555555555e-05, 'TIME_S': 10.411118507385254, 'TIME_S_1KI': 4.574305143842379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.852141866684, 'W': 53.49} -[19.54, 18.56, 18.75, 18.64, 18.6, 18.88, 22.46, 19.31, 19.16, 19.27, 18.96, 18.48, 23.29, 19.47, 18.87, 19.11, 18.98, 18.79, 18.75, 18.41] -348.19000000000005 -17.4095 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [150000, 150000], 'MATRIX_ROWS': 150000, 'MATRIX_SIZE': 22500000000, 'MATRIX_NNZ': 1799918, 'MATRIX_DENSITY': 7.999635555555555e-05, 'TIME_S': 10.411118507385254, 'TIME_S_1KI': 4.574305143842379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.852141866684, 'W': 53.49, 'J_1KI': 245.54136285882421, 'W_1KI': 23.501757469244286, 'W_D': 36.0805, 'J_D': 376.9613891310692, 'W_D_1KI': 15.852592267135327, 'J_D_1KI': 6.9651108379329205} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_0.0001.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_0.0001.json deleted file mode 100644 index 7b77d24..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999797, "MATRIX_DENSITY": 9.9994925e-05, "TIME_S": 11.147696495056152, "TIME_S_1KI": 11.147696495056152, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 626.7295667529106, "W": 53.75, "J_1KI": 626.7295667529106, "W_1KI": 53.75, "W_D": 36.8215, "J_D": 429.3418184593916, "W_D_1KI": 36.8215, "J_D_1KI": 36.8215} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_0.0001.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_0.0001.output deleted file mode 100644 index 11f1cee..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_0.0001.output +++ /dev/null @@ -1,49 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999797, "MATRIX_DENSITY": 9.9994925e-05, "TIME_S": 
11.147696495056152} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 33, ..., 3999757, - 3999776, 3999797]), - col_indices=tensor([ 11728, 28526, 32271, ..., 178372, 183326, - 184612]), - values=tensor([ 1.0472, -0.5329, -0.9142, ..., 0.2561, 0.9439, - -0.1336]), size=(200000, 200000), nnz=3999797, - layout=torch.sparse_csr) -tensor([0.0020, 0.6027, 0.5430, ..., 0.9784, 0.1939, 0.0521]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999797 -Density: 9.9994925e-05 -Time: 11.147696495056152 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 33, ..., 3999757, - 3999776, 3999797]), - col_indices=tensor([ 11728, 28526, 32271, ..., 178372, 183326, - 184612]), - values=tensor([ 1.0472, -0.5329, -0.9142, ..., 0.2561, 0.9439, - -0.1336]), size=(200000, 200000), nnz=3999797, - layout=torch.sparse_csr) -tensor([0.0020, 0.6027, 0.5430, ..., 0.9784, 0.1939, 0.0521]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999797 -Density: 9.9994925e-05 -Time: 11.147696495056152 seconds - -[20.25, 18.63, 18.63, 18.64, 18.93, 18.55, 18.58, 18.56, 19.11, 18.61] -[53.75] -11.660084962844849 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3999797, 'MATRIX_DENSITY': 9.9994925e-05, 'TIME_S': 11.147696495056152, 'TIME_S_1KI': 11.147696495056152, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 626.7295667529106, 'W': 53.75} -[20.25, 18.63, 18.63, 18.64, 18.93, 18.55, 18.58, 18.56, 19.11, 18.61, 19.7, 18.84, 18.75, 18.61, 19.11, 18.6, 18.9, 18.54, 18.99, 18.64] -338.57 -16.9285 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3999797, 'MATRIX_DENSITY': 9.9994925e-05, 'TIME_S': 11.147696495056152, 'TIME_S_1KI': 11.147696495056152, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 626.7295667529106, 'W': 53.75, 'J_1KI': 626.7295667529106, 'W_1KI': 53.75, 'W_D': 36.8215, 'J_D': 429.3418184593916, 'W_D_1KI': 36.8215, 'J_D_1KI': 36.8215} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_1e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_1e-05.json deleted file mode 100644 index e56a601..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4402, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], 
"MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399999, "MATRIX_DENSITY": 9.999975e-06, "TIME_S": 10.443501949310303, "TIME_S_1KI": 2.3724447863040217, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 555.5514586830138, "W": 53.36999999999999, "J_1KI": 126.20432955088911, "W_1KI": 12.124034529759198, "W_D": 36.17299999999999, "J_D": 376.540433107376, "W_D_1KI": 8.217401181281232, "J_D_1KI": 1.8667426581738376} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_1e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_1e-05.output deleted file mode 100644 index 76af8f1..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_1e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399998, "MATRIX_DENSITY": 9.99995e-06, "TIME_S": 2.3849852085113525} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 6, ..., 399994, 399994, - 399998]), - col_indices=tensor([ 7320, 149236, 156084, ..., 26050, 42582, - 181085]), - values=tensor([ 0.8723, 1.0312, 0.5520, ..., -0.0019, 0.2200, - 0.0304]), size=(200000, 200000), nnz=399998, - layout=torch.sparse_csr) -tensor([0.3194, 0.3524, 0.5256, ..., 0.7204, 0.9201, 0.8791]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399998 -Density: 9.99995e-06 -Time: 2.3849852085113525 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4402', '-ss', '200000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399999, "MATRIX_DENSITY": 9.999975e-06, "TIME_S": 10.443501949310303} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 399994, 399997, - 399999]), - col_indices=tensor([192850, 103441, 105823, ..., 109182, 30556, - 177729]), - values=tensor([-2.2730, -2.2086, 1.4122, ..., -2.1679, -0.9897, - 0.4728]), size=(200000, 200000), nnz=399999, - layout=torch.sparse_csr) -tensor([0.3260, 0.8889, 0.8069, ..., 0.6491, 0.1118, 0.7391]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399999 -Density: 9.999975e-06 -Time: 10.443501949310303 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 399994, 399997, - 399999]), - col_indices=tensor([192850, 103441, 105823, ..., 109182, 30556, - 177729]), - values=tensor([-2.2730, -2.2086, 1.4122, ..., -2.1679, -0.9897, - 0.4728]), size=(200000, 200000), nnz=399999, - layout=torch.sparse_csr) -tensor([0.3260, 0.8889, 0.8069, ..., 0.6491, 0.1118, 0.7391]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399999 -Density: 9.999975e-06 -Time: 10.443501949310303 seconds - -[19.15, 18.62, 18.63, 18.79, 18.71, 19.03, 18.7, 18.49, 18.67, 18.71] -[53.37] -10.409433364868164 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4402, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 399999, 'MATRIX_DENSITY': 9.999975e-06, 'TIME_S': 10.443501949310303, 'TIME_S_1KI': 2.3724447863040217, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.5514586830138, 'W': 53.36999999999999} -[19.15, 18.62, 18.63, 18.79, 18.71, 19.03, 18.7, 18.49, 18.67, 18.71, 19.1, 19.86, 19.17, 19.01, 18.9, 18.97, 18.87, 18.75, 22.63, 19.32] -343.94 -17.197 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4402, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 399999, 'MATRIX_DENSITY': 9.999975e-06, 'TIME_S': 10.443501949310303, 'TIME_S_1KI': 2.3724447863040217, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.5514586830138, 'W': 53.36999999999999, 'J_1KI': 126.20432955088911, 'W_1KI': 12.124034529759198, 'W_D': 36.17299999999999, 'J_D': 376.540433107376, 'W_D_1KI': 8.217401181281232, 'J_D_1KI': 1.8667426581738376} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_2e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_2e-05.json deleted file mode 100644 index 7c06afb..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3106, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799995, "MATRIX_DENSITY": 1.9999875e-05, "TIME_S": 10.422000646591187, "TIME_S_1KI": 3.355441289952088, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 557.4662446260453, "W": 53.70000000000001, "J_1KI": 179.48043935159217, 
"W_1KI": 17.289117836445595, "W_D": 36.76050000000001, "J_D": 381.61523064386853, "W_D_1KI": 11.835318737926595, "J_D_1KI": 3.81046965161835} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_2e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_2e-05.output deleted file mode 100644 index 29f4112..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_2e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799995, "MATRIX_DENSITY": 1.9999875e-05, "TIME_S": 3.3797810077667236} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 6, ..., 799987, 799991, - 799995]), - col_indices=tensor([ 34164, 181123, 18437, ..., 30843, 143393, - 164856]), - values=tensor([-0.4411, -0.4394, 0.8402, ..., 0.7898, -1.1250, - 0.7780]), size=(200000, 200000), nnz=799995, - layout=torch.sparse_csr) -tensor([0.6190, 0.7766, 0.4688, ..., 0.3207, 0.0417, 0.4453]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799995 -Density: 1.9999875e-05 -Time: 3.3797810077667236 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3106', '-ss', '200000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799995, "MATRIX_DENSITY": 1.9999875e-05, "TIME_S": 10.422000646591187} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 799991, 799993, - 799995]), - col_indices=tensor([136034, 62565, 134663, ..., 162509, 140910, - 164939]), - values=tensor([-1.2010, -0.0313, -0.0163, ..., 0.4614, 0.6236, - -2.2043]), size=(200000, 200000), nnz=799995, - layout=torch.sparse_csr) -tensor([0.7743, 0.0248, 0.7666, ..., 0.4535, 0.3182, 0.9147]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799995 -Density: 1.9999875e-05 -Time: 10.422000646591187 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 799991, 799993, - 799995]), - col_indices=tensor([136034, 62565, 134663, ..., 162509, 140910, - 164939]), - values=tensor([-1.2010, -0.0313, -0.0163, ..., 0.4614, 0.6236, - -2.2043]), size=(200000, 200000), nnz=799995, - layout=torch.sparse_csr) -tensor([0.7743, 0.0248, 0.7666, ..., 0.4535, 0.3182, 0.9147]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799995 -Density: 1.9999875e-05 -Time: 10.422000646591187 seconds - -[19.02, 18.62, 18.76, 18.93, 18.65, 18.58, 18.74, 19.03, 18.79, 18.88] -[53.7] -10.38112187385559 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3106, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 799995, 'MATRIX_DENSITY': 1.9999875e-05, 'TIME_S': 10.422000646591187, 'TIME_S_1KI': 3.355441289952088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 557.4662446260453, 'W': 53.70000000000001} -[19.02, 18.62, 18.76, 18.93, 18.65, 18.58, 18.74, 19.03, 18.79, 18.88, 19.11, 19.05, 18.72, 19.05, 18.68, 18.92, 18.54, 18.57, 19.07, 19.17] -338.79 -16.939500000000002 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3106, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 799995, 'MATRIX_DENSITY': 1.9999875e-05, 'TIME_S': 10.422000646591187, 'TIME_S_1KI': 3.355441289952088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 557.4662446260453, 'W': 53.70000000000001, 'J_1KI': 179.48043935159217, 'W_1KI': 17.289117836445595, 'W_D': 36.76050000000001, 'J_D': 381.61523064386853, 'W_D_1KI': 11.835318737926595, 'J_D_1KI': 3.81046965161835} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_5e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_5e-05.json deleted file mode 100644 index f2b8c95..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1654, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999960, "MATRIX_DENSITY": 4.9999e-05, "TIME_S": 10.674341678619385, "TIME_S_1KI": 6.453652768210027, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 564.5803745055198, "W": 53.36999999999999, "J_1KI": 341.3424271496492, "W_1KI": 32.26723095525997, "W_D": 36.10624999999999, "J_D": 381.9539094433187, "W_D_1KI": 21.829655380894796, "J_D_1KI": 13.198098779259247} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_5e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_5e-05.output deleted file mode 100644 index 457b0e0..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_5e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], 
"MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999955, "MATRIX_DENSITY": 4.9998875e-05, "TIME_S": 6.346288681030273} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 24, ..., 1999940, - 1999945, 1999955]), - col_indices=tensor([ 7258, 17767, 26650, ..., 151589, 180312, - 197627]), - values=tensor([ 2.7940, -1.4813, 0.1081, ..., 0.0395, -0.5169, - 0.8708]), size=(200000, 200000), nnz=1999955, - layout=torch.sparse_csr) -tensor([0.7683, 0.3883, 0.0886, ..., 0.4818, 0.9581, 0.1704]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999955 -Density: 4.9998875e-05 -Time: 6.346288681030273 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1654', '-ss', '200000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999960, "MATRIX_DENSITY": 4.9999e-05, "TIME_S": 10.674341678619385} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 20, ..., 1999940, - 1999951, 1999960]), - col_indices=tensor([ 23056, 39871, 78023, ..., 117252, 160095, - 163352]), - values=tensor([ 0.9981, -0.0409, 0.1434, ..., 1.1012, 0.7283, - -0.3726]), size=(200000, 200000), nnz=1999960, - layout=torch.sparse_csr) -tensor([0.3071, 0.5076, 0.9312, ..., 0.0160, 0.2096, 0.1451]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999960 -Density: 4.9999e-05 -Time: 10.674341678619385 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 20, ..., 1999940, - 1999951, 1999960]), - col_indices=tensor([ 23056, 39871, 78023, ..., 117252, 160095, - 163352]), - values=tensor([ 0.9981, -0.0409, 0.1434, ..., 1.1012, 0.7283, - -0.3726]), size=(200000, 200000), nnz=1999960, - layout=torch.sparse_csr) -tensor([0.3071, 0.5076, 0.9312, ..., 0.0160, 0.2096, 0.1451]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999960 -Density: 4.9999e-05 -Time: 10.674341678619385 seconds - -[19.0, 18.53, 18.48, 19.07, 18.61, 19.6, 19.16, 18.94, 18.91, 19.04] -[53.37] -10.578609228134155 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1654, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 1999960, 'MATRIX_DENSITY': 4.9999e-05, 'TIME_S': 10.674341678619385, 'TIME_S_1KI': 6.453652768210027, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 564.5803745055198, 'W': 53.36999999999999} -[19.0, 18.53, 18.48, 19.07, 18.61, 19.6, 19.16, 18.94, 18.91, 19.04, 19.37, 18.73, 18.81, 18.66, 18.89, 18.87, 18.65, 19.72, 23.27, 19.34] -345.27500000000003 -17.26375 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1654, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 1999960, 'MATRIX_DENSITY': 4.9999e-05, 'TIME_S': 10.674341678619385, 'TIME_S_1KI': 6.453652768210027, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 564.5803745055198, 'W': 53.36999999999999, 'J_1KI': 341.3424271496492, 'W_1KI': 32.26723095525997, 'W_D': 36.10624999999999, 'J_D': 381.9539094433187, 'W_D_1KI': 21.829655380894796, 'J_D_1KI': 13.198098779259247} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_8e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_8e-05.json deleted file mode 100644 index 75cab5e..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1110, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199871, "MATRIX_DENSITY": 7.9996775e-05, "TIME_S": 10.418358325958252, "TIME_S_1KI": 9.385908401764192, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 554.1568848896027, "W": 53.78, "J_1KI": 499.2404368374799, "W_1KI": 48.450450450450454, "W_D": 36.47375, "J_D": 375.83078617036347, "W_D_1KI": 32.859234234234236, "J_D_1KI": 29.602913724535348} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_8e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_8e-05.output deleted file mode 100644 index 7b0168e..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_200000_8e-05.output +++ /dev/null @@ -1,71 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '200000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199873, "MATRIX_DENSITY": 
7.9996825e-05, "TIME_S": 9.457699060440063} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 19, 39, ..., 3199844, - 3199863, 3199873]), - col_indices=tensor([ 8156, 29107, 34906, ..., 125693, 150327, - 191584]), - values=tensor([ 0.9466, -0.7312, 0.4212, ..., -1.5445, -0.1200, - -1.4006]), size=(200000, 200000), nnz=3199873, - layout=torch.sparse_csr) -tensor([0.2847, 0.6729, 0.6896, ..., 0.1178, 0.7823, 0.9871]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199873 -Density: 7.9996825e-05 -Time: 9.457699060440063 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1110', '-ss', '200000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199871, "MATRIX_DENSITY": 7.9996775e-05, "TIME_S": 10.418358325958252} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 26, ..., 3199812, - 3199839, 3199871]), - col_indices=tensor([ 6111, 13006, 16880, ..., 179544, 185529, - 194217]), - values=tensor([-0.7130, 1.2621, -1.5929, ..., 1.5451, -1.8739, - -0.5836]), size=(200000, 200000), nnz=3199871, - layout=torch.sparse_csr) -tensor([0.8320, 0.3492, 0.3739, ..., 0.1724, 0.8330, 0.6232]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199871 -Density: 7.9996775e-05 -Time: 10.418358325958252 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 26, ..., 3199812, - 3199839, 3199871]), - col_indices=tensor([ 6111, 13006, 16880, ..., 179544, 185529, - 194217]), - values=tensor([-0.7130, 1.2621, -1.5929, ..., 1.5451, -1.8739, - -0.5836]), size=(200000, 200000), nnz=3199871, - layout=torch.sparse_csr) -tensor([0.8320, 0.3492, 0.3739, ..., 0.1724, 0.8330, 0.6232]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199871 -Density: 7.9996775e-05 -Time: 10.418358325958252 seconds - -[19.67, 18.72, 23.9, 19.33, 19.24, 19.25, 18.9, 19.08, 18.98, 18.47] -[53.78] -10.304144382476807 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1110, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3199871, 'MATRIX_DENSITY': 7.9996775e-05, 'TIME_S': 10.418358325958252, 'TIME_S_1KI': 9.385908401764192, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 554.1568848896027, 'W': 53.78} -[19.67, 18.72, 23.9, 19.33, 19.24, 19.25, 18.9, 19.08, 18.98, 18.47, 19.6, 18.81, 18.81, 18.84, 18.84, 18.94, 18.78, 18.61, 18.87, 18.71] -346.125 -17.30625 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1110, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [200000, 200000], 'MATRIX_ROWS': 200000, 'MATRIX_SIZE': 40000000000, 'MATRIX_NNZ': 3199871, 'MATRIX_DENSITY': 7.9996775e-05, 'TIME_S': 10.418358325958252, 'TIME_S_1KI': 9.385908401764192, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 554.1568848896027, 'W': 53.78, 'J_1KI': 499.2404368374799, 'W_1KI': 48.450450450450454, 'W_D': 36.47375, 'J_D': 375.83078617036347, 'W_D_1KI': 32.859234234234236, 'J_D_1KI': 29.602913724535348} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_0.0001.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_0.0001.json deleted file mode 100644 index 012e2eb..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 44379, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39997, "MATRIX_DENSITY": 9.99925e-05, "TIME_S": 10.551446914672852, "TIME_S_1KI": 0.2377576537252496, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 555.8343685770035, "W": 52.71, "J_1KI": 12.524715937200106, "W_1KI": 1.1877239234773203, "W_D": 35.692, "J_D": 376.37716340827944, "W_D_1KI": 0.8042542644043353, "J_D_1KI": 0.01812240619221558} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_0.0001.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_0.0001.output deleted file mode 100644 index ba5097f..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_0.0001.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39997, "MATRIX_DENSITY": 9.99925e-05, "TIME_S": 0.2506370544433594} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 39993, 39995, 39997]), - col_indices=tensor([ 6946, 9752, 17458, ..., 19606, 14840, 16812]), - values=tensor([ 1.2114, 0.4786, 0.7481, ..., 2.2316, -1.3356, - 0.0494]), size=(20000, 20000), nnz=39997, - layout=torch.sparse_csr) -tensor([0.1940, 0.3245, 0.6685, ..., 0.9764, 0.3950, 0.5032]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39997 -Density: 9.99925e-05 -Time: 0.2506370544433594 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '41893', '-ss', '20000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39999, "MATRIX_DENSITY": 9.99975e-05, "TIME_S": 9.911674499511719} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 6, ..., 39993, 39994, 39999]), - col_indices=tensor([ 3254, 872, 4041, ..., 10014, 12933, 18899]), - values=tensor([-1.6022, 1.1095, -0.6606, ..., 0.0532, 0.0246, - -0.1741]), size=(20000, 20000), nnz=39999, - layout=torch.sparse_csr) -tensor([0.6695, 0.2661, 0.1797, ..., 0.7316, 0.8400, 0.8705]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39999 -Density: 9.99975e-05 -Time: 9.911674499511719 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '44379', '-ss', '20000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39997, "MATRIX_DENSITY": 9.99925e-05, "TIME_S": 10.551446914672852} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 39996, 39997, 39997]), - col_indices=tensor([ 2355, 6630, 8498, ..., 14868, 18936, 15519]), - values=tensor([-0.5894, 0.5137, 0.6041, ..., 3.6756, -0.6656, - 1.6246]), size=(20000, 20000), nnz=39997, - layout=torch.sparse_csr) -tensor([0.1964, 0.7243, 0.3521, ..., 0.3377, 0.9387, 0.6664]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39997 -Density: 9.99925e-05 -Time: 10.551446914672852 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 39996, 39997, 39997]), - col_indices=tensor([ 2355, 6630, 8498, ..., 14868, 18936, 15519]), - values=tensor([-0.5894, 0.5137, 0.6041, ..., 3.6756, -0.6656, - 1.6246]), size=(20000, 20000), nnz=39997, - layout=torch.sparse_csr) -tensor([0.1964, 0.7243, 0.3521, ..., 0.3377, 0.9387, 0.6664]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39997 -Density: 9.99925e-05 -Time: 10.551446914672852 seconds - -[18.93, 18.97, 18.85, 18.83, 18.81, 18.77, 18.79, 18.8, 18.67, 18.93] -[52.71] -10.545140743255615 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 44379, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 39997, 'MATRIX_DENSITY': 9.99925e-05, 'TIME_S': 10.551446914672852, 'TIME_S_1KI': 0.2377576537252496, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.8343685770035, 'W': 52.71} -[18.93, 18.97, 18.85, 18.83, 18.81, 18.77, 18.79, 18.8, 18.67, 18.93, 19.15, 18.67, 18.67, 19.05, 18.86, 18.7, 18.91, 19.07, 19.74, 19.39] -340.36 -17.018 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 44379, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 39997, 'MATRIX_DENSITY': 9.99925e-05, 'TIME_S': 10.551446914672852, 'TIME_S_1KI': 0.2377576537252496, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 555.8343685770035, 'W': 52.71, 'J_1KI': 12.524715937200106, 'W_1KI': 1.1877239234773203, 'W_D': 35.692, 'J_D': 376.37716340827944, 'W_D_1KI': 0.8042542644043353, 'J_D_1KI': 0.01812240619221558} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_1e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_1e-05.json deleted file mode 100644 index f06dc2b..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 142521, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.508326530456543, "TIME_S_1KI": 0.07373177658349678, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 546.1413905405998, "W": 52.38999999999999, "J_1KI": 3.832006444949164, "W_1KI": 0.36759495091951355, "W_D": 35.04074999999999, "J_D": 365.2835260657667, "W_D_1KI": 
0.24586376744479752, "J_D_1KI": 0.0017251055454620549} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_1e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_1e-05.output deleted file mode 100644 index fd3b69b..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_1e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0873408317565918} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([ 8010, 19782, 19298, ..., 14609, 17406, 4659]), - values=tensor([ 1.8351, -1.5190, 0.1888, ..., 0.1167, 1.3405, - -0.6385]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.9228, 0.8425, 0.3069, ..., 0.3610, 0.7820, 0.6502]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 0.0873408317565918 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '120218', '-ss', '20000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.856806516647339} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([ 9966, 11738, 6190, ..., 12804, 3064, 4806]), - values=tensor([-1.2230, -0.7207, 0.5787, ..., 0.3459, -2.3343, - -0.2556]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.0620, 0.5691, 0.8867, ..., 0.9198, 0.5585, 0.5343]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 8.856806516647339 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '142521', '-ss', '20000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.508326530456543} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([ 8160, 17593, 13371, ..., 1652, 3994, 15566]), - values=tensor([-0.5718, -1.1883, 1.1406, ..., -0.3730, 1.5301, - -0.0157]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.3221, 0.2397, 0.6495, ..., 0.9004, 0.2048, 0.2359]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.508326530456543 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([ 8160, 17593, 13371, ..., 1652, 3994, 15566]), - values=tensor([-0.5718, -1.1883, 1.1406, ..., -0.3730, 1.5301, - -0.0157]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.3221, 0.2397, 0.6495, ..., 0.9004, 0.2048, 0.2359]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.508326530456543 seconds - -[19.28, 18.92, 18.78, 19.29, 19.22, 18.88, 18.92, 18.46, 19.01, 18.67] -[52.39] -10.424535036087036 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 142521, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 4000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.508326530456543, 'TIME_S_1KI': 0.07373177658349678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 546.1413905405998, 'W': 52.38999999999999} -[19.28, 18.92, 18.78, 19.29, 19.22, 18.88, 18.92, 18.46, 19.01, 18.67, 19.34, 18.68, 18.71, 18.77, 19.13, 18.61, 22.8, 21.31, 19.2, 19.3] -346.985 -17.34925 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 142521, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 4000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.508326530456543, 'TIME_S_1KI': 0.07373177658349678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 546.1413905405998, 'W': 52.38999999999999, 'J_1KI': 3.832006444949164, 'W_1KI': 0.36759495091951355, 'W_D': 35.04074999999999, 'J_D': 365.2835260657667, 'W_D_1KI': 0.24586376744479752, 'J_D_1KI': 0.0017251055454620549} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_2e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_2e-05.json deleted file mode 100644 index b19b23e..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 93365, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.743499755859375, "TIME_S_1KI": 0.1150698843877189, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 556.5670068073273, "W": 52.010000000000005, "J_1KI": 5.961195381645449, "W_1KI": 0.5570609971616773, "W_D": 34.435, "J_D": 368.49422955989843, "W_D_1KI": 0.36882129277566544, "J_D_1KI": 0.003950316422381679} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_2e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_2e-05.output deleted file mode 100644 index 4674c69..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_2e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 0.12871932983398438} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8000, 8000, 8000]), - col_indices=tensor([ 6232, 5020, 14784, ..., 19600, 13595, 19263]), - values=tensor([ 0.1006, 1.1590, -0.3220, ..., 0.3010, -0.8009, - -0.2712]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.0359, 0.1101, 0.9981, ..., 0.8990, 0.5097, 0.9344]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 0.12871932983398438 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '81572', '-ss', '20000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 9.173684358596802} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7998, 8000, 8000]), - col_indices=tensor([ 9842, 9326, 14984, ..., 11799, 3427, 5394]), - values=tensor([-1.0874, -0.5411, -0.3571, ..., 0.6373, 0.9569, - -0.1896]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.6846, 0.5466, 0.2757, ..., 0.2176, 0.9069, 0.6997]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 9.173684358596802 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '93365', '-ss', '20000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.743499755859375} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7999, 8000, 8000]), - col_indices=tensor([ 2867, 9675, 11558, ..., 5534, 13441, 13103]), - values=tensor([-0.5057, -1.2337, 0.0449, ..., -0.3860, -0.9610, - 0.7393]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.7915, 0.7524, 0.8381, ..., 0.3492, 0.2103, 0.1752]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 10.743499755859375 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7999, 8000, 8000]), - col_indices=tensor([ 2867, 9675, 11558, ..., 5534, 13441, 13103]), - values=tensor([-0.5057, -1.2337, 0.0449, ..., -0.3860, -0.9610, - 0.7393]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.7915, 0.7524, 0.8381, ..., 0.3492, 0.2103, 0.1752]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 10.743499755859375 seconds - -[19.24, 18.69, 18.93, 18.9, 18.7, 25.63, 20.08, 18.84, 19.54, 18.84] -[52.01] -10.701153755187988 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 93365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.743499755859375, 'TIME_S_1KI': 0.1150698843877189, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 556.5670068073273, 'W': 52.010000000000005} -[19.24, 18.69, 18.93, 18.9, 18.7, 25.63, 20.08, 18.84, 19.54, 18.84, 22.75, 19.56, 19.1, 19.2, 18.88, 18.76, 19.1, 18.91, 18.86, 18.81] -351.5 -17.575 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 93365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 8000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.743499755859375, 'TIME_S_1KI': 0.1150698843877189, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 556.5670068073273, 'W': 52.010000000000005, 'J_1KI': 5.961195381645449, 'W_1KI': 0.5570609971616773, 'W_D': 34.435, 'J_D': 368.49422955989843, 'W_D_1KI': 0.36882129277566544, 'J_D_1KI': 0.003950316422381679} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_5e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_5e-05.json deleted file mode 100644 index 974ab70..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 57970, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 19998, "MATRIX_DENSITY": 4.9995e-05, "TIME_S": 10.538284063339233, "TIME_S_1KI": 0.18178858139277615, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 544.5686602807045, "W": 52.11000000000001, "J_1KI": 9.393973784383379, "W_1KI": 0.8989132309815423, "W_D": 35.073750000000004, "J_D": 366.53358373671773, "W_D_1KI": 0.6050327755735726, 
"J_D_1KI": 0.010436998026109584} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_5e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_5e-05.output deleted file mode 100644 index 867df9a..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_5e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 19999, "MATRIX_DENSITY": 4.99975e-05, "TIME_S": 0.19534778594970703} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 19997, 19998, 19999]), - col_indices=tensor([16547, 3567, 456, ..., 16591, 15722, 1589]), - values=tensor([ 0.1949, 0.0920, 0.2271, ..., -1.6377, -1.4449, - 0.7306]), size=(20000, 20000), nnz=19999, - layout=torch.sparse_csr) -tensor([0.9367, 0.6234, 0.3611, ..., 0.5123, 0.5335, 0.1164]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 19999 -Density: 4.99975e-05 -Time: 0.19534778594970703 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53750', '-ss', '20000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.73549509048462} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 19997, 19997, 20000]), - col_indices=tensor([ 7901, 12543, 14457, ..., 7819, 11013, 18899]), - values=tensor([ 0.1128, -0.7652, -0.7733, ..., 0.5508, -0.0238, - 0.8435]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.9901, 0.7877, 0.5269, ..., 0.7147, 0.3301, 0.1137]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 9.73549509048462 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '57970', '-ss', '20000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 19998, "MATRIX_DENSITY": 4.9995e-05, "TIME_S": 10.538284063339233} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 19997, 19998, 19998]), - col_indices=tensor([ 1537, 15941, 3225, ..., 6174, 1475, 17044]), - values=tensor([ 1.8079, 0.7034, 0.4181, ..., -0.8199, 0.6631, - -0.4583]), size=(20000, 20000), nnz=19998, - layout=torch.sparse_csr) -tensor([0.7172, 0.8915, 0.9905, ..., 0.4621, 0.4303, 0.4703]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 19998 -Density: 4.9995e-05 -Time: 10.538284063339233 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 19997, 19998, 19998]), - col_indices=tensor([ 1537, 15941, 3225, ..., 6174, 1475, 17044]), - values=tensor([ 1.8079, 0.7034, 0.4181, ..., -0.8199, 0.6631, - -0.4583]), size=(20000, 20000), nnz=19998, - layout=torch.sparse_csr) -tensor([0.7172, 0.8915, 0.9905, ..., 0.4621, 0.4303, 0.4703]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 19998 -Density: 4.9995e-05 -Time: 10.538284063339233 seconds - -[20.47, 19.24, 19.31, 18.77, 18.76, 18.71, 18.92, 18.6, 18.61, 18.53] -[52.11] -10.45036768913269 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 57970, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 19998, 'MATRIX_DENSITY': 4.9995e-05, 'TIME_S': 10.538284063339233, 'TIME_S_1KI': 0.18178858139277615, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 544.5686602807045, 'W': 52.11000000000001} -[20.47, 19.24, 19.31, 18.77, 18.76, 18.71, 18.92, 18.6, 18.61, 18.53, 19.26, 19.07, 18.81, 18.78, 19.0, 18.74, 19.06, 18.72, 19.1, 18.79] -340.72499999999997 -17.03625 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 57970, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 19998, 'MATRIX_DENSITY': 4.9995e-05, 'TIME_S': 10.538284063339233, 'TIME_S_1KI': 0.18178858139277615, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 544.5686602807045, 'W': 52.11000000000001, 'J_1KI': 9.393973784383379, 'W_1KI': 0.8989132309815423, 'W_D': 35.073750000000004, 'J_D': 366.53358373671773, 'W_D_1KI': 0.6050327755735726, 'J_D_1KI': 0.010436998026109584} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_8e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_8e-05.json deleted file mode 100644 index ea1e934..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 49178, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31995, "MATRIX_DENSITY": 7.99875e-05, "TIME_S": 10.53342890739441, "TIME_S_1KI": 0.214189859437033, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 551.4711124801636, "W": 52.21, "J_1KI": 11.213776739195648, "W_1KI": 1.0616535849363538, "W_D": 35.26825, "J_D": 372.5229086904526, "W_D_1KI": 0.7171550286713572, "J_D_1KI": 0.014582842504196128} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_8e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_8e-05.output deleted file mode 100644 index 2a5e193..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_20000_8e-05.output +++ /dev/null @@ -1,85 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '20000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31998, "MATRIX_DENSITY": 7.9995e-05, "TIME_S": 0.2279369831085205} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 31995, 31997, 31998]), - col_indices=tensor([ 2138, 3564, 4440, ..., 6020, 15856, 14552]), - values=tensor([-0.7903, 0.4575, -2.0739, ..., 0.7058, -0.3741, - -0.2904]), size=(20000, 20000), nnz=31998, - layout=torch.sparse_csr) -tensor([0.4365, 0.2036, 0.9555, ..., 0.8219, 0.7976, 0.9880]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31998 -Density: 7.9995e-05 -Time: 0.2279369831085205 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46065', '-ss', '20000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31999, "MATRIX_DENSITY": 7.99975e-05, "TIME_S": 9.83530044555664} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 31997, 31998, 31999]), - col_indices=tensor([ 522, 13000, 10649, ..., 19300, 3539, 11601]), - values=tensor([ 0.4030, -0.7428, 0.6132, ..., -0.1598, -0.1315, - 2.8497]), size=(20000, 20000), nnz=31999, - layout=torch.sparse_csr) -tensor([0.0843, 0.5678, 0.5849, ..., 0.1620, 0.9923, 0.8403]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31999 -Density: 7.99975e-05 -Time: 9.83530044555664 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '49178', '-ss', '20000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31995, "MATRIX_DENSITY": 7.99875e-05, "TIME_S": 10.53342890739441} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 31992, 31995, 31995]), - col_indices=tensor([15022, 2095, 15271, ..., 6926, 13418, 16309]), - values=tensor([-0.3921, -0.4377, -0.5768, ..., -0.4653, 0.1567, - -1.2468]), size=(20000, 20000), nnz=31995, - layout=torch.sparse_csr) -tensor([0.0218, 0.8049, 0.1953, ..., 0.0542, 0.7841, 0.1235]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31995 -Density: 7.99875e-05 -Time: 10.53342890739441 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 31992, 31995, 31995]), - col_indices=tensor([15022, 2095, 15271, ..., 6926, 13418, 16309]), - values=tensor([-0.3921, -0.4377, -0.5768, ..., -0.4653, 0.1567, - -1.2468]), size=(20000, 20000), nnz=31995, - layout=torch.sparse_csr) -tensor([0.0218, 0.8049, 0.1953, ..., 0.0542, 0.7841, 0.1235]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31995 -Density: 7.99875e-05 -Time: 10.53342890739441 seconds - -[19.45, 18.83, 18.98, 18.74, 18.98, 18.74, 18.89, 18.75, 18.96, 18.58] -[52.21] -10.562557220458984 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 49178, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 31995, 'MATRIX_DENSITY': 7.99875e-05, 'TIME_S': 10.53342890739441, 'TIME_S_1KI': 0.214189859437033, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 551.4711124801636, 'W': 52.21} -[19.45, 18.83, 18.98, 18.74, 18.98, 18.74, 18.89, 18.75, 18.96, 18.58, 18.88, 18.82, 18.79, 18.75, 18.65, 18.77, 18.91, 18.73, 18.68, 18.82] -338.835 -16.94175 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 49178, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [20000, 20000], 'MATRIX_ROWS': 20000, 'MATRIX_SIZE': 400000000, 'MATRIX_NNZ': 31995, 'MATRIX_DENSITY': 7.99875e-05, 'TIME_S': 10.53342890739441, 'TIME_S_1KI': 0.214189859437033, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 551.4711124801636, 'W': 52.21, 'J_1KI': 11.213776739195648, 'W_1KI': 1.0616535849363538, 'W_D': 35.26825, 'J_D': 372.5229086904526, 'W_D_1KI': 0.7171550286713572, 'J_D_1KI': 0.014582842504196128} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_0.0001.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_0.0001.json deleted file mode 100644 index 1d2d77e..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11296, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249985, "MATRIX_DENSITY": 9.9994e-05, "TIME_S": 10.336112976074219, "TIME_S_1KI": 0.91502416572895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 548.8240922570229, "W": 53.13000000000001, "J_1KI": 48.58570221822087, "W_1KI": 4.7034348441926355, "W_D": 35.883250000000004, "J_D": 370.6680238750577, "W_D_1KI": 
3.1766333215297453, "J_D_1KI": 0.281217539087265} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_0.0001.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_0.0001.output deleted file mode 100644 index 70708db..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_0.0001.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249994, "MATRIX_DENSITY": 9.99976e-05, "TIME_S": 0.9295320510864258} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 22, ..., 249991, 249991, - 249994]), - col_indices=tensor([ 473, 1047, 2376, ..., 2631, 8294, 41891]), - values=tensor([ 0.0867, 0.2700, -0.5537, ..., -0.9378, -0.4600, - -1.5650]), size=(50000, 50000), nnz=249994, - layout=torch.sparse_csr) -tensor([0.9388, 0.5454, 0.9964, ..., 0.1311, 0.1618, 0.0276]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249994 -Density: 9.99976e-05 -Time: 0.9295320510864258 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '11296', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249985, "MATRIX_DENSITY": 9.9994e-05, "TIME_S": 10.336112976074219} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 249976, 249980, - 249985]), - col_indices=tensor([ 2197, 3462, 31469, ..., 13423, 23435, 25682]), - values=tensor([ 0.4325, -0.1471, -0.5200, ..., -0.3119, 0.2457, - 0.8252]), size=(50000, 50000), nnz=249985, - layout=torch.sparse_csr) -tensor([0.2771, 0.8438, 0.7361, ..., 0.4084, 0.2350, 0.6651]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249985 -Density: 9.9994e-05 -Time: 10.336112976074219 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 249976, 249980, - 249985]), - col_indices=tensor([ 2197, 3462, 31469, ..., 13423, 23435, 25682]), - values=tensor([ 0.4325, -0.1471, -0.5200, ..., -0.3119, 0.2457, - 0.8252]), size=(50000, 50000), nnz=249985, - layout=torch.sparse_csr) -tensor([0.2771, 0.8438, 0.7361, ..., 0.4084, 0.2350, 0.6651]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249985 -Density: 9.9994e-05 -Time: 10.336112976074219 seconds - -[19.5, 18.85, 22.67, 19.31, 19.18, 19.38, 18.76, 18.72, 18.9, 18.8] -[53.13] -10.329834222793579 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11296, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 249985, 'MATRIX_DENSITY': 9.9994e-05, 'TIME_S': 10.336112976074219, 'TIME_S_1KI': 0.91502416572895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 548.8240922570229, 'W': 53.13000000000001} -[19.5, 18.85, 22.67, 19.31, 19.18, 19.38, 18.76, 18.72, 18.9, 18.8, 20.2, 19.04, 18.75, 18.81, 18.94, 18.7, 18.69, 18.73, 18.88, 18.75] -344.93500000000006 -17.246750000000002 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11296, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 0.0001, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 249985, 'MATRIX_DENSITY': 9.9994e-05, 'TIME_S': 10.336112976074219, 'TIME_S_1KI': 0.91502416572895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 548.8240922570229, 'W': 53.13000000000001, 'J_1KI': 48.58570221822087, 'W_1KI': 4.7034348441926355, 'W_D': 35.883250000000004, 'J_D': 370.6680238750577, 'W_D_1KI': 3.1766333215297453, 'J_D_1KI': 0.281217539087265} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_1e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_1e-05.json deleted file mode 100644 index c0b06b4..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 26842, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.116822242736816, "TIME_S_1KI": 0.3769026988576416, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 523.8462572264671, "W": 52.18999999999999, "J_1KI": 19.51591748850559, "W_1KI": 1.9443409581998359, "W_D": 35.13474999999999, "J_D": 352.6577368478178, "W_D_1KI": 1.3089467997913715, "J_D_1KI": 0.0487648759329175} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_1e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_1e-05.output deleted file mode 100644 index 1379d41..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_1e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 
0.39116597175598145} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), - col_indices=tensor([18337, 31501, 25221, ..., 43739, 24478, 39763]), - values=tensor([-0.5818, -0.6188, 0.3479, ..., -0.4811, 0.2734, - -0.3656]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.1661, 0.1412, 0.6450, ..., 0.4075, 0.5489, 0.2946]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 0.39116597175598145 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '26842', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.116822242736816} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 24998, 24999, 25000]), - col_indices=tensor([39322, 17603, 28365, ..., 9970, 47193, 3623]), - values=tensor([ 0.2642, -1.0431, 0.6103, ..., 1.0202, 1.4487, - -0.3012]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.7270, 0.4200, 0.6705, ..., 0.5052, 0.3503, 0.3630]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.116822242736816 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 24998, 24999, 25000]), - col_indices=tensor([39322, 17603, 28365, ..., 9970, 47193, 3623]), - values=tensor([ 0.2642, -1.0431, 0.6103, ..., 1.0202, 1.4487, - -0.3012]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.7270, 0.4200, 0.6705, ..., 0.5052, 0.3503, 0.3630]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.116822242736816 seconds - -[19.72, 18.75, 19.05, 18.61, 19.05, 18.87, 19.25, 18.69, 19.97, 18.84] -[52.19] -10.037291765213013 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 26842, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.116822242736816, 'TIME_S_1KI': 0.3769026988576416, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 523.8462572264671, 'W': 52.18999999999999} -[19.72, 18.75, 19.05, 18.61, 19.05, 18.87, 19.25, 18.69, 19.97, 18.84, 19.24, 18.76, 19.05, 18.79, 19.15, 18.62, 18.7, 18.67, 18.93, 18.59] -341.105 -17.05525 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 26842, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 1e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.116822242736816, 'TIME_S_1KI': 0.3769026988576416, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 523.8462572264671, 'W': 52.18999999999999, 'J_1KI': 19.51591748850559, 'W_1KI': 1.9443409581998359, 'W_D': 35.13474999999999, 'J_D': 352.6577368478178, 'W_D_1KI': 1.3089467997913715, 'J_D_1KI': 0.0487648759329175} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_2e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_2e-05.json deleted file mode 100644 index b9f3880..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 20587, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 50000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.146229267120361, "TIME_S_1KI": 0.4928464209025289, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 533.6980026054383, "W": 52.59000000000001, "J_1KI": 25.92402985405539, "W_1KI": 2.5545247000534324, "W_D": 35.59100000000001, "J_D": 361.18740465354927, "W_D_1KI": 1.7288094428522858, "J_D_1KI": 0.08397578291408586} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_2e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_2e-05.output deleted file mode 100644 index ca77c4e..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_2e-05.output +++ /dev/null @@ -1,65 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 50000, "MATRIX_DENSITY": 2e-05, "TIME_S": 0.5100221633911133} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 49999, 50000, 50000]), - col_indices=tensor([40869, 5149, 18504, ..., 7787, 3981, 15278]), - values=tensor([ 1.3982, -1.0552, -1.1130, ..., 1.1541, 0.4630, - 1.5192]), size=(50000, 50000), nnz=50000, - layout=torch.sparse_csr) -tensor([0.7764, 0.9026, 0.2352, ..., 0.3127, 0.4619, 0.5070]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 50000 -Density: 2e-05 -Time: 0.5100221633911133 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '20587', '-ss', '50000', '-sd', '2e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 50000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.146229267120361} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 49995, 49997, 50000]), - col_indices=tensor([ 5572, 5836, 28279, ..., 17749, 43890, 48615]), - values=tensor([-0.4735, 0.2382, 0.6330, ..., 0.6121, -0.1685, - 0.6070]), size=(50000, 50000), nnz=50000, - layout=torch.sparse_csr) -tensor([0.8964, 0.2723, 0.5282, ..., 0.1997, 0.6281, 0.9187]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 50000 -Density: 2e-05 -Time: 10.146229267120361 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 49995, 49997, 50000]), - col_indices=tensor([ 5572, 5836, 28279, ..., 17749, 43890, 48615]), - values=tensor([-0.4735, 0.2382, 0.6330, ..., 0.6121, -0.1685, - 0.6070]), size=(50000, 50000), nnz=50000, - layout=torch.sparse_csr) -tensor([0.8964, 0.2723, 0.5282, ..., 0.1997, 0.6281, 0.9187]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 50000 -Density: 2e-05 -Time: 10.146229267120361 seconds - -[18.86, 18.85, 18.73, 19.07, 18.64, 18.83, 19.03, 18.71, 18.79, 19.01] -[52.59] -10.148279190063477 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 20587, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 50000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.146229267120361, 'TIME_S_1KI': 0.4928464209025289, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 533.6980026054383, 'W': 52.59000000000001} -[18.86, 18.85, 18.73, 19.07, 18.64, 18.83, 19.03, 18.71, 18.79, 19.01, 19.4, 19.58, 18.93, 18.99, 18.78, 18.84, 18.67, 18.68, 18.9, 18.65] -339.98 -16.999000000000002 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 20587, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 2e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 50000, 'MATRIX_DENSITY': 2e-05, 'TIME_S': 10.146229267120361, 'TIME_S_1KI': 0.4928464209025289, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 533.6980026054383, 'W': 52.59000000000001, 'J_1KI': 25.92402985405539, 'W_1KI': 2.5545247000534324, 'W_D': 35.59100000000001, 'J_D': 361.18740465354927, 'W_D_1KI': 1.7288094428522858, 'J_D_1KI': 0.08397578291408586} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_5e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_5e-05.json deleted file mode 100644 index 2967a1b..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 14272, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124997, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.296154499053955, "TIME_S_1KI": 0.7214233813799015, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 543.075786485672, "W": 52.76, "J_1KI": 38.051834815419845, "W_1KI": 3.696748878923767, "W_D": 35.2405, "J_D": 362.7418926013708, "W_D_1KI": 2.4692054372197307, "J_D_1KI": 0.17301047065721206} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_5e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_5e-05.output deleted file mode 100644 index a4867c9..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_5e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124998, "MATRIX_DENSITY": 4.99992e-05, "TIME_S": 0.7356994152069092} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 124993, 124995, - 124998]), - col_indices=tensor([ 3859, 19407, 20153, ..., 16780, 40146, 42542]), - values=tensor([-0.4918, 1.7301, 1.0421, ..., -1.5226, -0.4153, - 1.9586]), size=(50000, 50000), nnz=124998, - layout=torch.sparse_csr) -tensor([0.5104, 0.0113, 0.1842, ..., 0.5070, 0.6053, 0.3843]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124998 -Density: 4.99992e-05 -Time: 0.7356994152069092 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '14272', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124997, "MATRIX_DENSITY": 4.99988e-05, "TIME_S": 10.296154499053955} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 124993, 124993, - 124997]), - col_indices=tensor([ 2083, 12614, 42719, ..., 9159, 18953, 49598]), - values=tensor([ 0.7591, -0.7071, -0.2497, ..., 0.0163, 1.3927, - 0.0120]), size=(50000, 50000), nnz=124997, - layout=torch.sparse_csr) -tensor([0.7363, 0.0296, 0.4551, ..., 0.5932, 0.0802, 0.7581]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124997 -Density: 4.99988e-05 -Time: 10.296154499053955 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 124993, 124993, - 124997]), - col_indices=tensor([ 2083, 12614, 42719, ..., 9159, 18953, 49598]), - values=tensor([ 0.7591, -0.7071, -0.2497, ..., 0.0163, 1.3927, - 0.0120]), size=(50000, 50000), nnz=124997, - layout=torch.sparse_csr) -tensor([0.7363, 0.0296, 0.4551, ..., 0.5932, 0.0802, 0.7581]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124997 -Density: 4.99988e-05 -Time: 10.296154499053955 seconds - -[18.93, 18.97, 18.85, 18.6, 18.66, 18.86, 18.95, 18.71, 24.1, 19.3] -[52.76] -10.29332423210144 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 14272, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 124997, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.296154499053955, 'TIME_S_1KI': 0.7214233813799015, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 543.075786485672, 'W': 52.76} -[18.93, 18.97, 18.85, 18.6, 18.66, 18.86, 18.95, 18.71, 24.1, 19.3, 18.92, 19.03, 19.06, 18.8, 23.0, 19.2, 18.78, 20.19, 18.75, 18.61] -350.39 -17.5195 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 14272, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 5e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 124997, 'MATRIX_DENSITY': 4.99988e-05, 'TIME_S': 10.296154499053955, 'TIME_S_1KI': 0.7214233813799015, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 543.075786485672, 'W': 52.76, 'J_1KI': 38.051834815419845, 'W_1KI': 3.696748878923767, 'W_D': 35.2405, 'J_D': 362.7418926013708, 'W_D_1KI': 2.4692054372197307, 'J_D_1KI': 0.17301047065721206} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_8e-05.json b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_8e-05.json deleted file mode 100644 index c87a346..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11535, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 7.99972e-05, "TIME_S": 10.338973045349121, "TIME_S_1KI": 0.8963132245642931, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 549.6679055690765, "W": 53.0, "J_1KI": 47.65218080356103, "W_1KI": 4.59471174685739, "W_D": 35.971999999999994, "J_D": 373.0689414930343, "W_D_1KI": 3.1185088859991326, "J_D_1KI": 0.2703518756826296} diff --git a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_8e-05.output b/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_8e-05.output deleted file mode 100644 index 07596ff..0000000 --- a/pytorch/output_1core_after_test/xeon_4216_10_10_10_50000_8e-05.output +++ /dev/null @@ -1,68 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199996, "MATRIX_DENSITY": 7.99984e-05, "TIME_S": 0.9102413654327393} - 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 13, ..., 199989, 199993, - 199996]), - col_indices=tensor([ 1854, 14801, 18462, ..., 33435, 37875, 38653]), - values=tensor([-0.6118, -1.1175, 0.4968, ..., -0.1548, -1.0527, - -0.4851]), size=(50000, 50000), nnz=199996, - layout=torch.sparse_csr) -tensor([0.3406, 0.2916, 0.2458, ..., 0.6214, 0.8343, 0.9095]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199996 -Density: 7.99984e-05 -Time: 0.9102413654327393 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '11535', '-ss', '50000', '-sd', '8e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 7.99972e-05, "TIME_S": 10.338973045349121} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 199988, 199988, - 199993]), - col_indices=tensor([ 1364, 19517, 29977, ..., 20703, 39856, 46483]), - values=tensor([-1.5742, -0.6518, 0.2459, ..., 1.7109, 0.6233, - -0.2330]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.9941, 0.6031, 0.4580, ..., 0.8936, 0.8010, 0.2243]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 10.338973045349121 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 199988, 199988, - 199993]), - col_indices=tensor([ 1364, 19517, 29977, ..., 20703, 39856, 46483]), - values=tensor([-1.5742, -0.6518, 0.2459, ..., 1.7109, 0.6233, - -0.2330]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.9941, 0.6031, 0.4580, ..., 0.8936, 0.8010, 0.2243]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 10.338973045349121 seconds - -[19.25, 18.97, 18.93, 18.76, 18.73, 18.94, 18.6, 18.46, 18.76, 18.82] -[53.0] -10.371092557907104 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11535, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 199993, 'MATRIX_DENSITY': 7.99972e-05, 'TIME_S': 10.338973045349121, 'TIME_S_1KI': 0.8963132245642931, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 549.6679055690765, 'W': 53.0} -[19.25, 18.97, 18.93, 18.76, 18.73, 18.94, 18.6, 18.46, 18.76, 18.82, 19.43, 18.76, 18.89, 19.19, 18.79, 18.97, 18.86, 19.07, 19.02, 20.22] -340.56000000000006 -17.028000000000002 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11535, 'MATRIX_TYPE': 'synthetic', 'MATRIX_DENSITY_GROUP': 8e-05, 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 199993, 'MATRIX_DENSITY': 7.99972e-05, 'TIME_S': 10.338973045349121, 'TIME_S_1KI': 0.8963132245642931, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 549.6679055690765, 'W': 53.0, 'J_1KI': 47.65218080356103, 'W_1KI': 4.59471174685739, 'W_D': 35.971999999999994, 'J_D': 373.0689414930343, 'W_D_1KI': 3.1185088859991326, 'J_D_1KI': 0.2703518756826296} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_0.0001.json b/pytorch/output_1core_before_test/altra_10_10_10_100000_0.0001.json deleted file mode 100644 index c770480..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4355, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999950, "MATRIX_DENSITY": 9.9995e-05, "TIME_S": 11.244934320449829, "TIME_S_1KI": 2.582074470826597, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 333.68617539405824, "W": 28.30760430786991, "J_1KI": 76.62139503881934, "W_1KI": 6.50002395129045, "W_D": 9.765604307869907, "J_D": 115.11561050748823, "W_D_1KI": 2.2423890488794274, "J_D_1KI": 0.5148998964131865} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_0.0001.output b/pytorch/output_1core_before_test/altra_10_10_10_100000_0.0001.output deleted file mode 100644 index 894dd4d..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_0.0001.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 22, ..., 999931, 999945, - 999950]), - col_indices=tensor([ 3967, 15846, 63833, ..., 13069, 17256, 79405]), - values=tensor([-0.0879, -0.8525, -0.9413, ..., 0.8592, 0.1754, - -1.3106]), size=(100000, 100000), nnz=999950, - layout=torch.sparse_csr) -tensor([0.5639, 0.2280, 0.0677, ..., 0.3539, 0.2164, 0.6848]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999950 -Density: 9.9995e-05 -Time: 11.244934320449829 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_1e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_100000_1e-05.json deleted file mode 100644 index d625747..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 13489, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.447299718856812, "TIME_S_1KI": 0.8486396114505754, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 283.05337776184075, "W": 27.6759695539416, "J_1KI": 20.984014957509135, "W_1KI": 2.0517436099000372, "W_D": 6.526969553941598, "J_D": 66.75396774053564, "W_D_1KI": 0.4838734935089034, "J_D_1KI": 0.03587170980123829} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_1e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_100000_1e-05.output deleted file mode 100644 index 26e5c63..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_1e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 99998, 100000, - 100000]), - col_indices=tensor([19809, 69637, 20442, ..., 78648, 72374, 83397]), - values=tensor([ 1.4309, -1.6421, 1.0432, ..., 0.3725, -1.5914, - -0.1212]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.5871, 0.5657, 0.8142, ..., 0.6073, 0.8645, 0.8871]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 11.447299718856812 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_2e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_100000_2e-05.json deleted file mode 100644 index cb41f71..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10698, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199997, "MATRIX_DENSITY": 1.99997e-05, "TIME_S": 10.250294923782349, "TIME_S_1KI": 0.9581505817706438, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 273.4043987178803, "W": 26.41730024698495, "J_1KI": 25.556589896978906, "W_1KI": 2.4693681292750935, "W_D": 8.052300246984949, "J_D": 83.3368393719197, "W_D_1KI": 0.7526921150668301, "J_D_1KI": 0.07035820854989998} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_2e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_100000_2e-05.output deleted file mode 100644 index f145055..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_2e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 199991, 199996, - 199997]), - col_indices=tensor([67062, 50348, 17603, ..., 89707, 99984, 35591]), - values=tensor([-0.9929, 0.7000, 1.1107, ..., 0.0629, -0.3885, - -0.4529]), size=(100000, 100000), nnz=199997, - layout=torch.sparse_csr) -tensor([0.6655, 0.1732, 0.0342, ..., 0.6053, 0.5292, 0.7404]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199997 -Density: 1.99997e-05 -Time: 10.250294923782349 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_5e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_100000_5e-05.json deleted file mode 100644 index 40670cc..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 7199, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499992, "MATRIX_DENSITY": 4.99992e-05, "TIME_S": 11.31485629081726, "TIME_S_1KI": 1.5717261134626006, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 295.0565606212616, "W": 27.701757111374366, "J_1KI": 40.9857703321658, "W_1KI": 3.848000710011719, "W_D": 9.363757111374365, "J_D": 99.73511632013322, "W_D_1KI": 1.3007024741456266, "J_D_1KI": 0.18067821560572672} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_5e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_100000_5e-05.output deleted file mode 100644 index 1ded62a..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_5e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 499981, 499988, - 499992]), - col_indices=tensor([24558, 34411, 73576, ..., 17247, 34835, 42520]), - values=tensor([-0.7589, -0.9344, -0.1628, ..., -1.6875, -0.0529, - 0.2816]), size=(100000, 100000), nnz=499992, - layout=torch.sparse_csr) -tensor([0.1922, 0.8660, 0.7634, ..., 0.4809, 0.8745, 0.7343]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499992 -Density: 4.99992e-05 -Time: 11.31485629081726 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_8e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_100000_8e-05.json deleted file mode 100644 index f37f0b2..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 5032, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799965, "MATRIX_DENSITY": 7.99965e-05, "TIME_S": 12.26259994506836, "TIME_S_1KI": 2.436923677477814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 310.0338071632385, "W": 27.900264062285075, "J_1KI": 61.6124418050951, "W_1KI": 5.544567579945364, "W_D": 9.440264062285074, "J_D": 104.90226907253263, "W_D_1KI": 1.8760461173062546, "J_D_1KI": 0.3728231552675387} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_100000_8e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_100000_8e-05.output deleted file mode 100644 index ba40e40..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_100000_8e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 13, ..., 799954, 799959, - 799965]), - col_indices=tensor([41338, 53394, 67056, ..., 53369, 73131, 80461]), - values=tensor([-2.6729, -0.1857, -0.9737, ..., -0.6627, 1.6687, - 0.6213]), size=(100000, 100000), nnz=799965, - layout=torch.sparse_csr) -tensor([0.8377, 0.9749, 0.0757, ..., 0.6522, 0.6094, 0.7639]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799965 -Density: 7.99965e-05 -Time: 12.26259994506836 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_0.0001.json b/pytorch/output_1core_before_test/altra_10_10_10_10000_0.0001.json deleted file mode 100644 index d964f1a..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 174988, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.306618690490723, "TIME_S_1KI": 0.058899002734420204, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 233.95734424591066, "W": 22.070639597402234, "J_1KI": 1.3369907893450446, "W_1KI": 0.12612658923698902, "W_D": 3.5896395974022326, "J_D": 38.05157269239426, "W_D_1KI": 0.020513632919984412, "J_D_1KI": 0.00011722879808892274} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_0.0001.output b/pytorch/output_1core_before_test/altra_10_10_10_10000_0.0001.output deleted file mode 100644 index f82e0df..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_0.0001.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9998, 10000]), - col_indices=tensor([6969, 8195, 8621, ..., 6951, 3350, 5541]), - values=tensor([ 0.2453, 0.3434, 0.5670, ..., -0.9208, -0.4489, - 0.7386]), size=(10000, 10000), nnz=10000, - layout=torch.sparse_csr) -tensor([0.8507, 0.5219, 0.6863, ..., 0.6390, 0.4996, 0.7605]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 10000 -Density: 0.0001 -Time: 10.306618690490723 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_1e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_10000_1e-05.json deleted file mode 100644 index f53b19b..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 421162, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.536884307861328, "TIME_S_1KI": 0.025018601649392225, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 229.9473793792725, "W": 22.209821165278864, "J_1KI": 0.545983206887783, "W_1KI": 0.05273462744805767, "W_D": 3.762821165278865, "J_D": 38.95802940464025, "W_D_1KI": 0.008934379562445959, "J_D_1KI": 2.1213641217502904e-05} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_1e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_10000_1e-05.output deleted file mode 100644 index 079883a..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_1e-05.output +++ /dev/null @@ -1,376 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 1000, 1000]), - col_indices=tensor([2530, 4970, 2659, 1725, 3663, 2962, 9710, 2857, 7581, - 7863, 4259, 7152, 4132, 8123, 6365, 5831, 1862, 2378, - 6041, 3808, 738, 8656, 4929, 2571, 6828, 1533, 2995, - 1243, 8209, 6264, 8379, 2876, 600, 8504, 5014, 1819, - 3403, 2620, 7514, 2844, 2867, 7243, 6595, 3708, 130, - 8615, 6164, 5283, 9449, 3201, 5177, 5685, 7082, 9401, - 5774, 9313, 5218, 3286, 2233, 2323, 9250, 5678, 2157, - 6962, 5644, 1475, 4576, 225, 19, 8412, 5404, 760, - 6342, 8184, 3920, 3857, 6548, 7878, 5379, 4131, 1499, - 5973, 2387, 4985, 910, 1391, 8506, 3135, 509, 522, - 4643, 8609, 9181, 9369, 657, 9977, 8394, 6111, 6186, - 4777, 3097, 4563, 730, 5851, 3266, 7218, 5493, 3063, - 2883, 2246, 337, 3792, 1833, 3911, 6225, 9364, 6276, - 4585, 5047, 4826, 1619, 3702, 8469, 1382, 3104, 4033, - 7493, 7947, 7910, 1997, 7777, 4897, 6099, 9852, 4856, - 1844, 7680, 874, 5343, 5763, 5307, 8773, 5614, 5905, - 8183, 4355, 9619, 1976, 9747, 2263, 8143, 9318, 1946, - 1753, 6870, 4519, 9828, 1895, 9979, 6498, 262, 4826, - 3829, 5617, 6779, 9889, 4187, 2300, 6066, 4525, 9089, - 4126, 4058, 9514, 696, 4341, 5595, 7904, 7112, 6985, - 6659, 1606, 3123, 7688, 3627, 6299, 3412, 312, 6193, - 6376, 2483, 3969, 8843, 7427, 8335, 7357, 3132, 3039, - 466, 2901, 1429, 7850, 6922, 7907, 9085, 9145, 2015, - 4742, 1475, 964, 1286, 9946, 2777, 4072, 2422, 2541, - 6361, 9198, 2326, 2186, 6923, 9299, 3624, 5335, 6354, - 3125, 8186, 9904, 6046, 1850, 7368, 1244, 8332, 5278, - 6934, 8867, 5542, 3046, 5199, 8255, 7621, 5671, 7734, - 412, 633, 7079, 2211, 1388, 2321, 8012, 8327, 8541, - 3250, 1598, 5668, 228, 5743, 4854, 6945, 8979, 9158, - 2597, 7295, 7218, 6155, 15, 8834, 7252, 8410, 8218, - 3804, 3346, 2996, 5488, 7318, 7786, 3174, 3436, 3021, - 5953, 8701, 2201, 9693, 4123, 9385, 6291, 1402, 1008, - 6548, 9956, 4354, 3481, 3668, 7445, 1322, 4726, 7448, - 635, 7476, 7924, 1634, 1896, 2534, 2218, 9622, 93, - 6026, 3146, 11, 9326, 805, 7254, 6290, 3057, 427, - 2084, 7183, 6361, 71, 5017, 6363, 4758, 254, 6013, - 1914, 3182, 7225, 9444, 194, 8377, 306, 7800, 8356, - 6444, 9627, 619, 4537, 7553, 9849, 5554, 9818, 8297, - 7948, 4427, 7862, 5512, 855, 8058, 4316, 7687, 1015, - 3535, 9263, 5873, 8334, 9063, 3258, 1228, 7632, 1376, - 4313, 5474, 6064, 2794, 6708, 2807, 1308, 5134, 7593, - 2244, 9476, 6253, 354, 8329, 8753, 4660, 3645, 6940, - 8143, 7450, 7605, 1547, 3531, 2277, 7787, 2003, 9597, - 7816, 5466, 3629, 867, 6995, 6656, 3836, 2685, 8685, - 1479, 8958, 3651, 9165, 8060, 49, 5100, 21, 1697, - 3790, 6846, 1079, 2865, 8704, 699, 8775, 3570, 3932, - 1266, 3979, 2829, 6388, 2909, 1277, 9455, 5934, 6037, - 9149, 9705, 7213, 1029, 7296, 5411, 8853, 7556, 5533, - 5800, 5309, 6763, 1544, 1759, 7981, 7145, 6563, 2607, - 7010, 8134, 1028, 5674, 8013, 4676, 5686, 1472, 6127, - 5328, 3959, 1286, 5611, 5670, 9454, 3331, 684, 109, - 7644, 9986, 2838, 8271, 4305, 8553, 1885, 906, 1316, - 4856, 4789, 7595, 9951, 2205, 5178, 162, 1776, 1046, - 3723, 5211, 6940, 3349, 5131, 1597, 4048, 6264, 9579, - 7019, 6116, 2502, 2667, 5380, 3671, 3187, 5544, 5163, - 6509, 1208, 149, 8319, 7382, 3219, 3869, 4567, 6332, - 9521, 9931, 9978, 2937, 1016, 2149, 1500, 6817, 3464, - 1062, 4161, 3240, 3578, 3364, 3756, 2453, 8670, 3263, - 5538, 4849, 9251, 8158, 5248, 4932, 9430, 1436, 8161, - 102, 5106, 3750, 3210, 1704, 2198, 1239, 6397, 190, - 1441, 805, 7267, 140, 4722, 3132, 4159, 4761, 2738, - 3476, 4106, 6838, 6172, 7201, 386, 4252, 3467, 1013, 
- 5282, 3070, 7979, 7300, 6752, 5258, 7783, 3626, 4552, - 8953, 6445, 199, 6987, 4875, 2243, 8032, 3942, 9125, - 7152, 6019, 6436, 2737, 9308, 2830, 7267, 5576, 882, - 7044, 3088, 2429, 1602, 2091, 6300, 7519, 1080, 6215, - 8377, 2144, 8543, 5243, 8462, 1493, 7499, 2059, 8573, - 1994, 2300, 1644, 6521, 407, 5365, 2093, 1294, 7570, - 2633, 9009, 3734, 5191, 9570, 4305, 9956, 8898, 6696, - 6082, 7921, 6056, 1897, 1148, 1933, 5294, 9190, 3580, - 9337, 1312, 7221, 632, 8218, 4804, 6643, 4508, 961, - 9760, 8617, 347, 5123, 5239, 8631, 2748, 4481, 5972, - 5917, 9338, 7591, 7446, 9363, 8946, 7112, 5340, 5625, - 2010, 4953, 7224, 6204, 4062, 2517, 5631, 8711, 3404, - 6681, 2208, 874, 1332, 7033, 5402, 9363, 1083, 8664, - 8716, 624, 1360, 7803, 6700, 4845, 606, 2067, 213, - 3744, 7601, 5644, 3006, 3043, 6743, 2596, 1465, 4296, - 1738, 3250, 4939, 9984, 2420, 5681, 7823, 9870, 3676, - 8958, 3932, 3601, 475, 9973, 8292, 9179, 9570, 9380, - 5122, 6322, 3925, 2308, 8046, 2036, 9482, 6415, 4009, - 2858, 7935, 5276, 1290, 9345, 2286, 3007, 5523, 7015, - 5262, 7340, 8145, 8217, 2804, 8381, 4971, 2618, 4974, - 1261, 599, 8249, 2713, 7305, 6764, 3842, 1162, 5015, - 635, 4810, 7750, 4535, 4779, 8120, 5731, 7217, 7148, - 633, 6360, 6, 8763, 4069, 5070, 4717, 5466, 2843, - 5220, 6162, 2706, 7199, 3002, 519, 7110, 2010, 1067, - 8728, 5379, 53, 8578, 8877, 4325, 4612, 8938, 3715, - 5620, 901, 7418, 8900, 9908, 9045, 552, 6080, 7900, - 9718, 4415, 9953, 6498, 1629, 3217, 7963, 255, 9081, - 6282, 8645, 3913, 8715, 1055, 7665, 8681, 6398, 5150, - 8539, 3693, 9752, 3981, 824, 7020, 8844, 9783, 4279, - 4243, 6853, 5775, 6219, 666, 306, 9014, 7333, 2499, - 3076, 3059, 9663, 7677, 9773, 9180, 3187, 4902, 2441, - 3195, 762, 7082, 2939, 1656, 2527, 8328, 1946, 9014, - 3454, 3809, 6030, 6255, 425, 1983, 8056, 8899, 470, - 2626, 1404, 536, 1615, 380, 4649, 2042, 2943, 5846, - 1613, 1528, 27, 822, 1707, 887, 8207, 2457, 3124, - 5136, 4633, 7379, 1512, 9225, 3588, 5477, 9158, 4355, - 3182, 8209, 9681, 8206, 3612, 8002, 8716, 3247, 946, - 483, 9222, 2093, 7469, 1714, 6203, 4033, 2235, 8767, - 6476, 4278, 6654, 7649, 9578, 122, 2105, 8341, 1801, - 3438, 4028, 3776, 8321, 9941, 2532, 1929, 1322, 5146, - 7324, 6012, 8131, 4453, 2088, 3587, 4283, 3645, 5511, - 4808, 621, 9374, 3402, 8394, 8344, 7646, 5466, 8173, - 9383, 8305, 5465, 2075, 3105, 3557, 9922, 3611, 5901, - 8648, 3282, 2609, 7332, 6343, 7943, 2671, 6155, 3017, - 137, 8013, 5830, 1885, 341, 336, 4075, 1511, 4830, - 392, 2227, 1623, 7861, 4306, 7218, 9253, 4080, 3779, - 4389, 6138, 910, 2660, 4801, 333, 1664, 4895, 1135, - 6988, 8271, 2538, 3157, 5656, 7052, 5051, 1473, 4812, - 6242, 9085, 4588, 8888, 2121, 3093, 9974, 6175, 1452, - 9289]), - values=tensor([ 8.9470e-01, -6.3328e-01, 1.2992e-01, -4.9101e-03, - 5.5163e-01, 2.6371e-01, 1.7431e+00, -5.4804e-01, - 1.0729e+00, 1.3279e+00, 1.2904e+00, -1.4863e+00, - 2.1495e+00, 8.6254e-02, -5.1315e-01, -2.6226e-01, - 4.1487e-01, -1.2750e+00, -5.1574e-01, -1.1579e-01, - 1.1582e-02, 6.2385e-01, -8.2921e-01, -7.9801e-01, - -5.6621e-01, 1.4624e+00, -7.1880e-01, -8.8174e-01, - 1.1228e+00, -6.6545e-01, -4.3788e-01, -4.3776e-01, - 2.3466e+00, -1.1897e+00, 3.5670e-01, -2.2970e+00, - -1.2241e+00, -8.2906e-02, 7.7402e-01, -9.9002e-03, - -3.3721e-01, 1.4123e+00, 6.4595e-01, 1.3535e-01, - -3.6410e-01, 7.4891e-01, -4.0386e-01, -6.1129e-01, - -3.4769e-01, 1.6626e-01, -1.5877e+00, -5.6148e-01, - -1.4188e+00, 6.5498e-02, 1.6168e+00, -1.2089e+00, - 2.5670e+00, 6.9087e-01, 7.7932e-01, -7.9989e-01, - -8.5542e-02, 5.4492e-01, -8.6174e-01, 
-2.8907e-01, - -2.3817e-01, -3.5902e-03, -2.5215e-01, 1.5861e-02, - -9.2638e-01, -4.9050e-01, -7.2189e-01, -1.0584e+00, - 1.2391e-01, 2.5372e+00, 1.2737e+00, 6.5553e-01, - -7.1202e-01, -1.4809e+00, -1.0171e+00, 1.8006e+00, - 9.1026e-02, 1.8092e-01, -5.6572e-01, -9.4144e-01, - 1.7667e+00, 5.2346e-01, -7.0761e-01, -4.9577e-01, - 6.5988e-01, 1.3220e+00, -4.1523e-01, 1.8696e+00, - -6.9362e-01, 4.6472e-01, 5.7684e-01, 1.5456e+00, - 8.5336e-01, 7.8062e-01, 1.9827e-01, 1.2576e-01, - -9.6653e-01, -4.8192e-01, -6.4608e-01, 3.9988e-01, - 9.0879e-01, 3.0924e-01, 2.2371e-01, 4.6651e-01, - 7.5662e-02, -1.2638e+00, 1.5154e-01, -6.8864e-01, - -5.8504e-01, -1.7702e-01, -1.1063e+00, -1.5594e-01, - -3.5456e-01, 9.8416e-02, 1.8944e-01, -1.0316e-01, - -1.1537e+00, 1.9020e+00, -1.4234e+00, 4.7207e-01, - 2.3251e-01, -5.0383e-01, -1.7346e+00, 2.0380e-01, - -4.1302e-01, 8.0877e-01, 1.7696e-01, 1.0884e+00, - 7.6877e-01, -1.8517e+00, 1.1329e+00, -1.6727e+00, - 7.4248e-01, -6.2220e-02, -2.4725e+00, 1.5424e-01, - -7.6277e-01, 1.5777e+00, 1.4019e+00, -1.1586e+00, - -1.3856e+00, -1.0960e+00, -6.6632e-01, 6.4003e-01, - -4.8302e-01, 1.4978e+00, -7.6496e-01, -1.7062e+00, - -1.8932e-01, 1.3442e+00, 4.1649e-01, -6.3058e-01, - -5.2200e-01, 1.7874e+00, -1.0328e-01, -6.4158e-01, - 4.7096e-01, -3.6378e-02, 1.2852e+00, -6.5401e-01, - 1.6917e+00, -5.9966e-01, 8.9949e-01, 5.7687e-01, - -2.0450e-01, 9.0297e-01, 4.4177e-02, 1.1839e+00, - -8.0734e-01, -2.8815e-02, 1.2756e+00, -1.6029e-01, - -1.2085e+00, -1.4567e+00, -2.6680e+00, -2.8512e-01, - 2.6555e+00, -1.5458e+00, 4.2395e-01, -5.3377e-01, - 1.1846e+00, 6.5128e-02, 2.1652e+00, 9.7681e-01, - 4.2833e-02, -2.5953e-01, 1.0917e+00, -9.8225e-01, - -8.5215e-01, 1.8473e+00, -9.2533e-01, 2.2942e-01, - 3.5024e-02, -8.3874e-01, -2.7296e-01, 4.3417e-01, - -1.4622e+00, 2.5054e+00, -2.4562e-01, -1.0155e+00, - 2.0280e-01, -7.4808e-01, 2.3659e+00, -8.5380e-01, - 1.1584e-01, 8.3909e-01, 6.1597e-01, 3.7901e-01, - -7.6897e-01, -6.0971e-01, 1.6245e+00, -7.8796e-01, - 3.5382e-01, 1.0606e+00, -6.3211e-01, 5.9116e-01, - -2.2673e-01, 3.9744e-01, -1.2223e+00, 8.3866e-01, - 8.5580e-02, 4.5003e-01, -1.9895e+00, -1.5410e+00, - -1.0574e+00, 1.1098e+00, -1.9738e-01, 2.2011e-01, - 3.5964e-01, 1.0379e+00, -1.1501e+00, -5.7893e-01, - 3.2590e-01, 6.9814e-01, 6.3347e-01, -7.9617e-01, - 1.9328e+00, 2.1340e+00, 1.4220e-01, 2.5087e-01, - 1.2185e+00, 1.9544e-01, -4.7843e-01, 4.5100e-01, - -1.9931e+00, 2.5379e-01, 8.8952e-01, 4.0093e-01, - 1.5057e+00, 3.9045e-01, 1.5819e-01, 1.5494e+00, - -1.4377e+00, -4.6549e-01, -1.2749e+00, 1.5133e+00, - 2.4309e-01, -3.7416e-01, 1.1291e+00, -3.9234e-02, - -5.6900e-01, 9.1325e-01, 2.4710e-01, 1.0508e+00, - 3.8052e-01, -1.4816e+00, 1.9208e-01, 4.7700e-02, - -4.5286e-01, -2.2769e+00, -8.2130e-01, -1.4660e+00, - 8.4530e-03, -8.1260e-01, -2.1475e-01, 2.9007e-01, - -1.0202e+00, -1.0550e+00, -1.6464e+00, -5.1054e-01, - 8.4056e-02, -4.8204e-01, -2.4200e-01, 8.8335e-01, - -2.0087e-01, 1.4526e+00, 6.7583e-01, 6.5501e-01, - 1.7348e+00, 3.7417e-01, 1.0195e+00, 4.1031e-01, - -8.9822e-01, 1.6424e-01, 3.3839e-01, 8.2900e-01, - -2.8972e-01, 1.4430e+00, 1.4510e+00, 2.7382e-01, - -1.5728e+00, -3.4008e-01, 2.0765e-01, -5.8920e-01, - 1.0153e+00, -4.5164e-01, -1.2500e+00, 9.8244e-01, - -1.3938e+00, 5.8204e-01, -1.6011e+00, 3.3950e-01, - -1.3478e+00, -1.1412e+00, 4.8467e-01, -8.9456e-01, - 9.8666e-02, -5.5337e-01, 4.0520e-01, 1.4504e+00, - -4.6902e-01, -1.9035e-02, -1.4093e+00, 1.0945e+00, - 2.4254e+00, -1.2247e+00, -3.4132e-01, 1.1391e+00, - -1.0195e+00, 2.0456e+00, -1.2624e-01, 1.6242e+00, - 1.1411e+00, 
-3.3380e+00, 3.4374e-02, -8.0840e-01, - -6.7946e-01, -4.8907e-01, -5.9090e-01, 1.8193e-01, - -9.2658e-01, 3.3141e-01, 4.8071e-02, 1.8269e-01, - 9.6370e-01, -1.9650e+00, -3.9257e-01, -1.5556e+00, - -1.1833e-02, -6.7597e-01, 1.3737e+00, -4.2838e-01, - -1.4089e+00, -4.7673e-01, 2.7510e+00, 2.5499e-01, - 2.8952e-01, 5.6592e-01, 1.7760e-01, -1.2665e-01, - -1.7958e+00, 4.0447e-01, -1.4793e+00, 2.2040e-01, - -1.2838e+00, 7.3033e-01, -3.4105e-01, -8.9769e-01, - -1.5574e+00, 1.5666e-01, -8.8247e-01, -8.2500e-01, - 1.0369e+00, -2.3400e-01, 7.8729e-01, -1.3348e-01, - 6.8396e-01, 4.7203e-01, 2.3535e-01, 7.4789e-01, - 6.8961e-01, -9.4972e-02, -2.6393e-01, -7.0786e-01, - 8.4702e-01, 1.4584e+00, 2.6959e-01, 3.5619e-01, - 1.0724e+00, -4.5745e-01, 4.5769e-01, -3.4797e-01, - 5.3498e-01, 1.3786e+00, -4.2203e-01, 2.0502e-01, - -1.1847e+00, 1.2664e+00, 2.4788e+00, 6.1679e-02, - -3.7294e-01, 7.9168e-01, -6.5893e-01, 1.5618e+00, - 1.5180e+00, 7.9784e-01, -8.5964e-01, 5.2165e-01, - -1.8936e+00, 7.5706e-01, -2.8501e-01, -1.9869e+00, - 1.5665e+00, -1.0884e+00, -4.5241e-02, -1.7418e+00, - 1.5031e+00, 1.2108e+00, 1.5896e+00, 5.2247e-01, - -6.6288e-01, 1.0725e+00, -4.7222e-01, 1.2363e+00, - 5.9453e-01, 1.4958e+00, 8.0517e-01, 5.9643e-01, - 5.7505e-01, 1.9679e+00, 1.4612e-01, -1.5188e+00, - 1.6063e+00, 6.7179e-01, -6.1350e-01, -2.5872e-01, - 7.6412e-01, 1.1989e+00, 5.1334e-01, 8.2913e-01, - 9.6647e-01, 7.0046e-02, 1.6081e+00, -7.3860e-01, - -1.5420e+00, 1.5756e-01, -6.4708e-02, 4.8478e-01, - 2.5268e+00, -2.7869e-01, -1.9300e+00, -1.2759e+00, - 1.3952e+00, -4.4965e-01, -5.8387e-01, -1.1737e-01, - -4.4703e-02, -5.3928e-03, -1.0702e+00, -1.3311e+00, - -4.8351e-01, 7.3013e-01, -7.0295e-01, -1.8194e-01, - 1.8836e-01, 7.7657e-01, -5.5083e-01, -1.1130e+00, - 9.2671e-01, 1.3525e+00, -3.5006e-01, 7.4196e-01, - -6.3943e-01, -2.6915e-01, -1.7878e+00, 9.2550e-01, - -1.2805e-01, 2.9456e-01, 1.0258e+00, 2.8220e+00, - 5.9649e-01, 1.0403e-01, 8.6820e-01, -6.0915e-01, - 1.6235e+00, -1.3677e+00, 1.1249e+00, 5.0475e-01, - 1.7625e+00, -1.2130e+00, 4.7422e-01, 2.3344e-01, - -4.7930e-01, -8.4184e-01, -2.3063e+00, -2.4137e-01, - -1.2106e-01, -6.7608e-01, 2.3140e-01, -4.8369e-01, - -6.3059e-01, 6.2849e-02, 2.8805e-01, -1.7033e+00, - -2.2300e-01, -2.2792e-01, 3.8326e-01, 5.3423e-01, - -8.7879e-01, -1.9224e+00, -1.6434e+00, -2.9181e-01, - -1.9124e-01, -6.4700e-01, 3.0458e-03, -3.6707e-01, - 9.7367e-01, -1.5956e-01, -2.5277e-02, -3.3421e-01, - -1.2408e-01, -2.0157e+00, -9.9401e-01, 7.8027e-01, - -1.2730e+00, -5.2298e-01, 7.7452e-01, -1.8945e-01, - 8.8803e-01, -4.2500e-01, -1.2219e+00, 1.5046e-01, - -1.4904e+00, 1.9010e-01, 2.8726e-01, 2.9264e-01, - 9.2994e-01, 1.3011e-01, -1.7569e+00, 1.9840e-02, - -2.6004e-01, -8.5328e-01, 2.2076e-01, 4.3787e-01, - -1.0601e+00, -1.0720e-01, -4.2976e-02, 3.0975e-02, - -7.7689e-01, 1.0649e-01, 6.7526e-01, 1.7097e+00, - 1.9441e-01, -9.2419e-01, 6.2603e-01, -1.0258e+00, - -4.6087e-01, 6.4227e-01, 1.7753e+00, -8.6585e-01, - -9.4326e-01, -5.1792e-01, 3.0924e-01, -1.9302e+00, - -1.4643e+00, -4.7364e-01, -1.4321e+00, -1.1858e+00, - 1.3240e+00, 7.0653e-01, -1.6020e+00, -3.3540e-01, - -7.5205e-02, -1.1897e+00, -9.9895e-01, 1.4871e+00, - 1.8769e+00, 7.4980e-01, 1.2594e+00, 3.0823e-01, - -9.8196e-01, -2.6007e-02, -1.9805e+00, -1.0897e+00, - -6.3257e-01, -8.1333e-01, 1.8023e-01, 5.2256e-01, - 3.0330e-02, 7.0382e-01, -4.4174e-01, -5.4528e-01, - 5.2947e-01, -1.9958e-01, -2.3572e-01, 1.0042e+00, - 2.4078e+00, -1.7504e+00, -6.4608e-01, -9.8215e-01, - 4.7901e-01, 1.7700e+00, -5.1201e-01, -2.0727e+00, - 1.1244e+00, 1.1444e+00, 
9.9255e-01, -2.8542e-01, - 2.3592e+00, -1.3202e+00, 6.7331e-01, -1.8613e+00, - 6.2552e-02, -1.4576e+00, -1.3137e-01, -3.6158e-01, - 5.8659e-03, -5.3628e-02, -8.7430e-01, -1.3782e+00, - -8.2555e-01, 1.7618e+00, -7.3578e-01, 8.6826e-02, - 1.3930e-01, 2.3544e+00, -1.8926e+00, -7.9015e-01, - 7.4101e-01, -1.0513e-01, 7.8354e-01, 4.9956e-01, - -1.4931e+00, 1.0831e+00, 5.4255e-01, 6.6320e-01, - 8.7351e-01, 2.3351e-01, 4.6022e-02, -3.6396e-01, - 1.7971e+00, -7.8342e-01, 1.4417e+00, -9.5022e-01, - 2.8444e-01, 7.9567e-01, -4.9468e-01, -4.5620e-01, - -7.2071e-01, 8.9833e-01, -2.9479e-01, -5.2172e-01, - -5.7475e-02, 3.5245e-01, -1.8242e-02, 1.7194e-01, - 2.0376e-01, 1.9668e+00, 1.1607e+00, -1.0201e+00, - -8.8988e-01, 1.9244e-01, 9.4279e-01, 4.2757e-01, - -1.1236e+00, -1.6247e+00, 6.7265e-01, -3.2769e-01, - -8.8991e-01, 8.6635e-01, 1.8502e-01, 7.6184e-02, - -1.0022e+00, 1.7965e+00, -1.5759e+00, 1.1171e+00, - -9.1340e-02, 1.0683e+00, -1.0615e+00, 2.7624e-02, - 1.2138e-01, -1.8209e-01, -1.5818e-01, -1.1219e+00, - 7.9002e-01, 1.0021e+00, -4.0343e-02, 5.0322e-01, - -3.0017e-01, -3.6917e-01, -9.0112e-01, -3.6111e-01, - 1.7868e+00, -1.1625e+00, 1.3144e-01, -1.2115e-01, - -1.3529e+00, -1.4616e-01, -8.6462e-01, 7.8624e-03, - -4.5036e-01, 7.6185e-02, -6.9569e-01, 3.4035e-01, - -4.0796e-01, -8.8995e-01, 1.8799e+00, -7.2486e-01, - -5.4362e-01, -4.0425e-01, 9.1659e-01, -3.6393e-01, - -1.1918e+00, -2.2858e-02, -8.5112e-02, 9.1905e-01, - 4.3963e-01, 1.1368e+00, -1.6529e-01, -1.0775e+00, - 1.5423e-01, 2.5594e-01, 1.3336e+00, -5.0227e-01, - 1.1538e+00, 1.7713e+00, 1.2267e+00, 1.0350e+00, - -4.1316e-01, 1.1455e+00, -3.6598e-01, 1.3717e-01, - -1.4888e+00, -1.1332e+00, -2.1479e+00, 3.5083e-01, - -4.5121e-01, -6.5572e-02, 1.9112e+00, 1.4673e-01, - 1.3955e+00, 3.7318e-01, 7.2463e-01, 4.1861e-01, - 2.2504e-02, -8.8837e-01, -2.2298e-01, -8.9439e-01, - 7.2990e-01, 4.7920e-01, -1.0970e+00, -2.2214e-01, - -8.2237e-01, -2.5817e-01, -8.4245e-01, 1.6783e+00, - 2.6325e-01, -8.0084e-01, -1.3940e-01, -4.6891e-01, - -4.0825e-01, -1.0788e+00, -4.2787e-01, 2.3392e-03, - 7.6665e-01, -2.7889e+00, 4.9696e-01, 1.0246e+00, - -6.5344e-01, -9.6619e-01, 3.0382e-01, -1.5664e+00, - 1.6839e-01, -4.5576e-01, 5.7640e-01, -3.0460e-01, - -9.5166e-03, 2.7927e-01, -1.2088e+00, 1.4455e-01, - 1.8245e-01, -9.5298e-01, -4.8514e-01, -1.5275e+00, - -5.5707e-01, -6.5409e-02, -2.4245e-01, 8.8684e-02, - -3.9952e-01, -2.3679e-01, 1.6628e-01, 2.8653e-01, - -2.2847e-01, -7.2285e-02, 3.0976e-01, 4.3467e-01, - 2.2024e-02, -1.5670e+00, -1.8041e+00, 3.4552e-01, - 1.4196e+00, 2.1582e+00, 1.4587e+00, 1.3556e+00, - -8.2307e-01, -3.5995e-01, 4.3963e-01, 1.0853e+00, - -1.0461e+00, 1.9422e+00, 3.3780e-01, -1.7451e-01, - -1.5490e-01, -2.2423e-01, -2.3621e-02, -1.0625e-01, - -2.0323e-01, -2.9332e-01, 5.5477e-01, 2.5819e-02, - -2.1378e-01, -8.5278e-01, -7.0569e-02, -2.2078e+00, - -4.9855e-02, 1.1305e+00, 1.4045e+00, 4.0245e-01, - -2.4021e-01, 3.3423e-01, -6.0139e-01, -1.7496e+00, - -3.2777e-01, -5.2787e-01, -1.3991e+00, 3.8358e-02, - -6.3318e-01, -7.7577e-01, 7.3562e-01, -2.7959e-01, - -2.5080e-01, 1.3028e-01, -1.2883e+00, 2.0003e-01, - -5.8190e-01, -9.9767e-02, 1.1887e+00, 2.7652e-01, - -2.9126e-01, 6.8052e-01, 1.0591e+00, 2.7902e-01, - -1.3262e+00, 1.0332e-01, 8.3872e-01, 3.3676e-01, - -3.7281e-01, 5.3883e-01, 4.5571e-01, 3.9211e-01, - -9.3511e-01, 5.1949e-01, -1.3974e+00, -2.7468e-02, - -4.2666e-01, 8.6021e-01, 1.1205e+00, -6.5077e-01, - -1.2670e+00, 2.9893e-01, 1.2343e+00, -9.9901e-01, - 6.9171e-01, 2.7780e-01, -6.0220e-01, -1.6771e+00, - -1.4072e+00, 6.4064e-01, 
-1.0835e-01, 5.5062e-01, - 1.0087e+00, -1.9452e+00, 7.4225e-01, 3.7207e-01, - 9.5448e-01, 6.4854e-01, -1.0018e+00, 6.9077e-02, - 1.6585e+00, 1.0669e+00, -5.5269e-01, -6.1291e-02, - -2.0841e-03, 1.8497e+00, -1.9300e+00, -7.7695e-01, - 8.5749e-01, -1.2011e-01, -2.0469e-01, -8.2343e-01, - -1.0558e+00, 1.0489e+00, -7.3995e-01, 7.4295e-01, - 1.0340e+00, 1.0733e+00, 1.6341e+00, 1.8494e-01, - 1.4786e+00, -1.8913e-01, 2.9562e-01, 1.0600e+00, - -2.3762e-01, 7.3992e-01, -1.0133e+00, 8.9775e-01, - -1.8880e+00, 1.8053e+00, 9.5499e-01, -3.1954e-02, - -3.5939e-01, -7.8488e-01, -1.5284e-02, -1.7831e+00, - -1.2333e+00, 3.0641e-01, -1.8010e+00, -5.0043e-01, - 1.9418e+00, -3.4874e-01, -4.5679e-01, 1.1499e+00, - -1.5938e-01, 2.8064e-01, 2.9951e-01, -1.2253e+00, - 3.8956e-01, 1.4223e+00, 3.6994e-02, 1.0937e-01, - 1.5851e+00, 1.9382e+00, -4.7179e-01, 1.5437e+00, - -2.3698e-01, 5.5444e-01, 5.0093e-01, -2.3629e-01, - 1.1946e+00, 1.0786e+00, 2.7450e-01, 1.3731e+00, - 2.1239e+00, 6.3523e-01, 1.1535e+00, -2.3906e-01, - -2.2832e+00, -7.3033e-01, 9.7389e-01, -2.0793e-01, - -3.1373e-01, 2.2560e-02, 5.4020e-01, 1.1695e+00, - -7.0974e-02, 4.5920e-01, 4.9151e-01, -1.0381e-01, - -8.7481e-01, 8.7994e-01, -9.5454e-01, -4.9043e-02, - -6.9171e-01, -2.3172e+00, -2.2653e-01, 2.2483e-01, - -1.3724e+00, 3.6675e-01, -1.6696e+00, -7.1760e-03, - 4.9692e-01, 3.0592e-01, 8.0049e-01, 9.6644e-01, - 1.6653e-01, 4.7140e-01, 5.6384e-01, 8.5685e-01, - -8.3702e-01, 2.1149e-01, -3.8775e-01, -9.9643e-01, - 2.1336e+00, 6.2605e-01, -1.0817e+00, 6.3471e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.2030, 0.3015, 0.6362, ..., 0.9382, 0.1176, 0.2577]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.536884307861328 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_2e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_10000_2e-05.json deleted file mode 100644 index f44e17d..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 369956, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.531599760055542, "TIME_S_1KI": 0.02846716842017846, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 237.72309764862058, "W": 21.839624352909834, "J_1KI": 0.6425712723908265, "W_1KI": 0.05903303190895629, "W_D": 3.327624352909833, "J_D": 36.22100619506832, "W_D_1KI": 0.008994648966119843, "J_D_1KI": 2.4312753316934565e-05} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_2e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_10000_2e-05.output deleted file mode 100644 index 661ba98..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([5535, 1889, 4938, ..., 638, 220, 8220]), - values=tensor([-2.0912, 0.7849, 0.1159, ..., -2.0269, 1.0335, - -0.3226]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.6955, 0.5821, 0.3401, ..., 0.8045, 0.7311, 0.0501]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.531599760055542 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_5e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_10000_5e-05.json deleted file mode 100644 index 7483683..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 234716, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 4999, "MATRIX_DENSITY": 4.999e-05, "TIME_S": 10.51655387878418, "TIME_S_1KI": 0.04480544095325491, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 243.09820373535158, "W": 22.53373621978891, "J_1KI": 1.0357121105308185, "W_1KI": 0.09600426140437342, "W_D": 4.192736219788909, "J_D": 45.23203049087525, "W_D_1KI": 0.01786301837023854, "J_D_1KI": 7.61048176103825e-05} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_5e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_10000_5e-05.output deleted file mode 100644 index 2838fe1..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_5e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4998, 4999]), - col_indices=tensor([7423, 8245, 1936, ..., 8643, 1181, 2417]), - values=tensor([-1.2841, 2.0112, 0.4498, ..., -0.7304, -0.4260, - -1.8730]), size=(10000, 10000), nnz=4999, - layout=torch.sparse_csr) -tensor([0.2088, 0.3538, 0.8006, ..., 0.2370, 0.9743, 0.4506]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 4999 -Density: 4.999e-05 -Time: 10.51655387878418 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_8e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_10000_8e-05.json deleted file mode 100644 index 60cb023..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 191432, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 10.596841812133789, "TIME_S_1KI": 0.05535564488765614, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 238.61670440673828, "W": 21.909053924199824, "J_1KI": 1.2464828472080858, "W_1KI": 0.11444823187450281, "W_D": 3.623053924199823, "J_D": 39.45953989028931, "W_D_1KI": 0.018926062122319273, "J_D_1KI": 9.88657179694057e-05} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_10000_8e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_10000_8e-05.output deleted file mode 100644 index a136dca..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_10000_8e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 7997, 7998, 8000]), - col_indices=tensor([1884, 3819, 3931, ..., 530, 4803, 8162]), - values=tensor([ 0.7232, -0.9657, 0.2765, ..., -1.1954, -0.1013, - -0.2160]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.9908, 0.0482, 0.9010, ..., 0.1457, 0.5647, 0.1931]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 10.596841812133789 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_0.0001.json b/pytorch/output_1core_before_test/altra_10_10_10_150000_0.0001.json deleted file mode 100644 index 80b8488..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1902, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249876, "MATRIX_DENSITY": 9.999448888888889e-05, "TIME_S": 10.985355854034424, "TIME_S_1KI": 5.775686568892967, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 327.8529440498352, "W": 28.308663786684395, "J_1KI": 172.37273609349904, "W_1KI": 14.883629751148472, "W_D": 9.837663786684399, "J_D": 113.93356674623492, "W_D_1KI": 5.172273284271503, "J_D_1KI": 2.7193865847904855} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_0.0001.output b/pytorch/output_1core_before_test/altra_10_10_10_150000_0.0001.output deleted file mode 100644 index 47b927b..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_0.0001.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 30, ..., 2249847, - 2249862, 2249876]), - col_indices=tensor([ 4148, 25396, 42440, ..., 111948, 113804, - 137308]), - values=tensor([ 0.9290, 0.4624, -1.0432, ..., 0.1435, -0.3192, - -0.4817]), size=(150000, 150000), nnz=2249876, - layout=torch.sparse_csr) -tensor([0.9615, 0.4406, 0.5244, ..., 0.9334, 0.8574, 0.8953]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249876 -Density: 9.999448888888889e-05 -Time: 10.985355854034424 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_1e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_150000_1e-05.json deleted file mode 100644 index 6a0404f..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 7140, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224996, "MATRIX_DENSITY": 9.999822222222222e-06, "TIME_S": 10.172731399536133, "TIME_S_1KI": 1.4247522968538, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 287.08693472862245, "W": 27.290755206277613, "J_1KI": 40.208254163672606, "W_1KI": 3.8222346227279567, "W_D": 8.802755206277613, "J_D": 92.6011753883362, "W_D_1KI": 1.2328788804310382, "J_D_1KI": 0.17267211210518743} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_1e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_150000_1e-05.output deleted file mode 100644 index 141f01c..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_1e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 224990, 224995, - 224996]), - col_indices=tensor([23784, 74178, 89450, ..., 58381, 91046, 42850]), - values=tensor([-0.7749, -0.1900, -2.3464, ..., 1.5658, -0.4629, - 1.5452]), size=(150000, 150000), nnz=224996, - layout=torch.sparse_csr) -tensor([0.9691, 0.8306, 0.5499, ..., 0.9207, 0.9110, 0.8720]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224996 -Density: 9.999822222222222e-06 -Time: 10.172731399536133 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_2e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_150000_2e-05.json deleted file mode 100644 index 6d24110..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 5293, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449998, "MATRIX_DENSITY": 1.999991111111111e-05, "TIME_S": 10.339292526245117, "TIME_S_1KI": 1.9533898594833021, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 290.1392547225952, "W": 27.488741223386217, "J_1KI": 54.81565364114778, "W_1KI": 5.193414174076368, "W_D": 8.968741223386218, "J_D": 94.66362512588498, "W_D_1KI": 1.6944532823325558, "J_D_1KI": 0.3201309809810232} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_2e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_150000_2e-05.output deleted file mode 100644 index a0fcc9a..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_2e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 9, ..., 449997, 449997, - 449998]), - col_indices=tensor([ 19772, 54292, 65560, ..., 86157, 112779, - 75889]), - values=tensor([ 2.4722, -0.2292, -1.5954, ..., -0.0059, 0.4660, - 0.5565]), size=(150000, 150000), nnz=449998, - layout=torch.sparse_csr) -tensor([0.2171, 0.6338, 0.6140, ..., 0.0705, 0.3733, 0.1122]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449998 -Density: 1.999991111111111e-05 -Time: 10.339292526245117 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_5e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_150000_5e-05.json deleted file mode 100644 index 7b98e3c..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 2998, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124964, "MATRIX_DENSITY": 4.99984e-05, "TIME_S": 10.584005117416382, "TIME_S_1KI": 3.5303552759894536, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 281.6860947990417, "W": 27.34832483022761, "J_1KI": 93.95800360208196, "W_1KI": 9.122189736566915, "W_D": 8.972324830227613, "J_D": 92.4144041137695, "W_D_1KI": 2.9927701234915323, "J_D_1KI": 0.9982555448604176} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_5e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_150000_5e-05.output deleted file mode 100644 index ed53d93..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_5e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 15, ..., 1124941, - 1124955, 1124964]), - col_indices=tensor([ 5396, 36299, 48720, ..., 104838, 113229, - 148805]), - values=tensor([-0.3281, 0.4676, -0.1990, ..., -0.6293, -0.7132, - 0.0544]), size=(150000, 150000), nnz=1124964, - layout=torch.sparse_csr) -tensor([0.3108, 0.2362, 0.8265, ..., 0.3622, 0.6966, 0.1679]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124964 -Density: 4.99984e-05 -Time: 10.584005117416382 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_8e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_150000_8e-05.json deleted file mode 100644 index a934a50..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 2200, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799933, "MATRIX_DENSITY": 7.999702222222222e-05, "TIME_S": 10.941624164581299, "TIME_S_1KI": 4.973465529355136, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 312.9840323638916, "W": 27.80822650753295, "J_1KI": 142.26546925631436, "W_1KI": 12.640102957969523, "W_D": 9.62822650753295, "J_D": 108.36653520584105, "W_D_1KI": 4.376466594333159, "J_D_1KI": 1.9893029974241634} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_150000_8e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_150000_8e-05.output deleted file mode 100644 index 84ad7f4..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_150000_8e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 28, ..., 1799903, - 1799918, 1799933]), - col_indices=tensor([ 20567, 23884, 29488, ..., 132804, 133649, - 149402]), - values=tensor([-0.6439, -1.1052, -0.5250, ..., 2.6361, -0.6596, - 0.2152]), size=(150000, 150000), nnz=1799933, - layout=torch.sparse_csr) -tensor([0.3083, 0.5054, 0.4956, ..., 0.6007, 0.4097, 0.9222]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799933 -Density: 7.999702222222222e-05 -Time: 10.941624164581299 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_0.0001.json b/pytorch/output_1core_before_test/altra_10_10_10_200000_0.0001.json deleted file mode 100644 index c58a2ea..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999791, "MATRIX_DENSITY": 9.9994775e-05, "TIME_S": 12.91820240020752, "TIME_S_1KI": 12.91820240020752, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 398.09815809249875, "W": 29.22550966501482, "J_1KI": 398.09815809249875, "W_1KI": 29.22550966501482, "W_D": 10.86650966501482, "J_D": 148.01923155903813, "W_D_1KI": 10.86650966501482, "J_D_1KI": 10.86650966501482} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_0.0001.output b/pytorch/output_1core_before_test/altra_10_10_10_200000_0.0001.output deleted file mode 100644 index 69d27bb..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_0.0001.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 22, 52, ..., 3999743, - 3999762, 3999791]), - col_indices=tensor([ 2073, 2100, 7957, ..., 188560, 190096, - 196703]), - values=tensor([-1.2106, -0.7276, 0.5707, ..., -1.7235, -0.9896, - 0.9684]), size=(200000, 200000), nnz=3999791, - layout=torch.sparse_csr) -tensor([0.0226, 0.8163, 0.0532, ..., 0.2078, 0.7406, 0.9648]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999791 -Density: 9.9994775e-05 -Time: 12.91820240020752 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_1e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_200000_1e-05.json deleted file mode 100644 index dea1dce..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4363, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399996, "MATRIX_DENSITY": 9.9999e-06, "TIME_S": 10.13521933555603, "TIME_S_1KI": 2.3229932009067222, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 276.770800628662, "W": 26.507982031782326, "J_1KI": 63.43589287844649, "W_1KI": 6.075631911937274, "W_D": 8.225982031782326, "J_D": 85.8877763748168, "W_D_1KI": 1.8853958358428435, "J_D_1KI": 0.43213289842833913} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_1e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_200000_1e-05.output deleted file mode 100644 index 464a02b..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_1e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 399993, 399996, - 399996]), - col_indices=tensor([ 19959, 140065, 97028, ..., 14484, 107134, - 180632]), - values=tensor([-1.5410, 1.2347, 0.6327, ..., 0.3226, 0.3103, - -0.5170]), size=(200000, 200000), nnz=399996, - layout=torch.sparse_csr) -tensor([0.3858, 0.2247, 0.3080, ..., 0.5810, 0.8361, 0.8056]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399996 -Density: 9.9999e-06 -Time: 10.13521933555603 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_2e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_200000_2e-05.json deleted file mode 100644 index 5946466..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3124, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799990, "MATRIX_DENSITY": 1.999975e-05, "TIME_S": 10.508908987045288, "TIME_S_1KI": 3.3639273326009245, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 287.49335591316225, "W": 27.12286476098722, "J_1KI": 92.02732263545526, "W_1KI": 8.682094993913962, "W_D": 8.584864760987216, "J_D": 90.99671446752546, "W_D_1KI": 2.7480360950663303, "J_D_1KI": 0.8796530393938318} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_2e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_200000_2e-05.output deleted file mode 100644 index bd96c0b..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_2e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 799981, 799983, - 799990]), - col_indices=tensor([ 81597, 89837, 104074, ..., 124649, 148598, - 181345]), - values=tensor([ 1.3246, 0.0435, 0.3228, ..., 3.3401, -0.6021, - 1.2929]), size=(200000, 200000), nnz=799990, - layout=torch.sparse_csr) -tensor([0.2409, 0.5216, 0.4532, ..., 0.1503, 0.4515, 0.7861]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799990 -Density: 1.999975e-05 -Time: 10.508908987045288 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_5e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_200000_5e-05.json deleted file mode 100644 index cd42aea..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1670, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999946, "MATRIX_DENSITY": 4.999865e-05, "TIME_S": 10.96977186203003, "TIME_S_1KI": 6.5687256658862445, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 312.92809484481813, "W": 28.670639571147365, "J_1KI": 187.38209272144798, "W_1KI": 17.16804764739363, "W_D": 10.199639571147369, "J_D": 111.32481963586812, "W_D_1KI": 6.107568605477467, "J_D_1KI": 3.6572267098667464} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_5e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_200000_5e-05.output deleted file mode 100644 index 3db8383..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_5e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 22, ..., 1999922, - 1999935, 1999946]), - col_indices=tensor([ 1356, 4671, 28719, ..., 130386, 140323, - 189730]), - values=tensor([-1.0201, -0.3659, 0.4051, ..., -1.7721, 0.2732, - -1.2666]), size=(200000, 200000), nnz=1999946, - layout=torch.sparse_csr) -tensor([0.7252, 0.6227, 0.1293, ..., 0.3105, 0.3232, 0.8533]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999946 -Density: 4.999865e-05 -Time: 10.96977186203003 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_8e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_200000_8e-05.json deleted file mode 100644 index 92427b4..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199864, "MATRIX_DENSITY": 7.99966e-05, "TIME_S": 10.491073846817017, "TIME_S_1KI": 10.491073846817017, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 291.99573203086857, "W": 28.172083366275405, "J_1KI": 291.99573203086857, "W_1KI": 28.172083366275405, "W_D": 9.839083366275403, "J_D": 101.97933580899242, "W_D_1KI": 9.839083366275403, "J_D_1KI": 9.839083366275403} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_200000_8e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_200000_8e-05.output deleted file mode 100644 index 8646cbf..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_200000_8e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 30, ..., 3199834, - 3199851, 3199864]), - col_indices=tensor([ 622, 17931, 19929, ..., 164428, 165760, - 182959]), - values=tensor([ 1.5371, 0.4535, 0.6808, ..., -1.4735, -1.1137, - -0.2374]), size=(200000, 200000), nnz=3199864, - layout=torch.sparse_csr) -tensor([0.4639, 0.7677, 0.4075, ..., 0.9409, 0.9057, 0.5443]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199864 -Density: 7.99966e-05 -Time: 10.491073846817017 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_0.0001.json b/pytorch/output_1core_before_test/altra_10_10_10_20000_0.0001.json deleted file mode 100644 index e568380..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 59832, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 40000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.537490367889404, "TIME_S_1KI": 0.176117969780208, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 247.72398761749267, "W": 23.681060476394155, "J_1KI": 4.140326039869847, "W_1KI": 0.39579256044247485, "W_D": 5.235060476394157, "J_D": 54.76317489767075, "W_D_1KI": 0.0874959967307487, "J_D_1KI": 0.0014623612236052397} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_0.0001.output b/pytorch/output_1core_before_test/altra_10_10_10_20000_0.0001.output deleted file mode 100644 index 9ed7a40..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_0.0001.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 39998, 39999, 40000]), - col_indices=tensor([ 6949, 12737, 4837, ..., 17748, 940, 3582]), - values=tensor([ 0.3175, -1.1230, -0.3611, ..., 0.1212, 0.6109, - 0.2287]), size=(20000, 20000), nnz=40000, - layout=torch.sparse_csr) -tensor([0.8753, 0.4850, 0.0169, ..., 0.3395, 0.8405, 0.1086]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 40000 -Density: 0.0001 -Time: 10.537490367889404 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_1e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_20000_1e-05.json deleted file mode 100644 index 793b912..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 176391, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.436025381088257, "TIME_S_1KI": 0.059164160195748404, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 245.02620697021484, "W": 22.452811523643, "J_1KI": 1.3891083273535205, "W_1KI": 0.12729000642687552, "W_D": 4.125811523642998, "J_D": 45.024737648010216, "W_D_1KI": 0.02339014759054032, "J_D_1KI": 0.000132603974072035} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_1e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_20000_1e-05.output deleted file mode 100644 index 75249dc..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_1e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([ 7038, 14907, 15840, ..., 2266, 10724, 4700]), - values=tensor([-0.0756, 1.4940, 1.0237, ..., 1.0818, -0.9875, - -0.1046]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.0655, 0.0531, 0.0446, ..., 0.3091, 0.0545, 0.8375]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.436025381088257 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_2e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_20000_2e-05.json deleted file mode 100644 index f33e1c1..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 124213, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.902704000473022, "TIME_S_1KI": 0.08777425873679101, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 233.6807588863373, "W": 21.87973913117048, "J_1KI": 1.8812906771943136, "W_1KI": 0.1761469341467518, "W_D": 3.5907391311704835, "J_D": 38.34993827414514, "W_D_1KI": 0.02890791729666366, "J_D_1KI": 0.00023272859762394967} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_2e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_20000_2e-05.output deleted file mode 100644 index 766da7a..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8000, 8000, 8000]), - col_indices=tensor([ 4354, 13429, 12928, ..., 15020, 14646, 19167]), - values=tensor([ 1.2865, -0.3043, -1.0918, ..., 1.5649, 0.8460, - -0.0791]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.2037, 0.5065, 0.5866, ..., 0.0284, 0.8729, 0.2058]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 10.902704000473022 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_5e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_20000_5e-05.json deleted file mode 100644 index 15dc954..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 76209, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.030388593673706, "TIME_S_1KI": 0.13161685094508135, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 224.19208275794986, "W": 22.00437610628281, "J_1KI": 2.941805859648465, "W_1KI": 0.28873723715417876, "W_D": 3.8003761062828083, "J_D": 38.72021776103976, "W_D_1KI": 0.049867812283100534, "J_D_1KI": 0.0006543559459263412} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_5e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_20000_5e-05.output deleted file mode 100644 index 00ec45d..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_5e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 19999, 20000, 20000]), - col_indices=tensor([ 8259, 19402, 5633, ..., 2308, 3033, 5423]), - values=tensor([ 1.5769, -0.6601, -1.2272, ..., -1.5862, -0.3276, - 1.7980]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.1625, 0.5576, 0.6423, ..., 0.9184, 0.8092, 0.9258]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 10.030388593673706 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_8e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_20000_8e-05.json deleted file mode 100644 index f74ee39..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 63873, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31999, "MATRIX_DENSITY": 7.99975e-05, "TIME_S": 10.265483856201172, "TIME_S_1KI": 0.16071710826485638, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 243.4201574897766, "W": 23.596340483763072, "J_1KI": 3.811002418702372, "W_1KI": 0.36942589957827365, "W_D": 5.3763404837630695, "J_D": 55.46239884853362, "W_D_1KI": 0.08417234956496594, "J_D_1KI": 0.0013178079871771476} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_20000_8e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_20000_8e-05.output deleted file mode 100644 index 78da96f..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_20000_8e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 31997, 31998, 31999]), - col_indices=tensor([ 7661, 12430, 14674, ..., 1317, 1668, 16257]), - values=tensor([ 1.2294, 1.3863, -0.2346, ..., 1.3472, 1.1634, - -0.5372]), size=(20000, 20000), nnz=31999, - layout=torch.sparse_csr) -tensor([0.8386, 0.4605, 0.7141, ..., 0.3240, 0.8539, 0.7370]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31999 -Density: 7.99975e-05 -Time: 10.265483856201172 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_0.0001.json b/pytorch/output_1core_before_test/altra_10_10_10_50000_0.0001.json deleted file mode 100644 index 51cbb56..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 16346, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249991, "MATRIX_DENSITY": 9.99964e-05, "TIME_S": 10.408724308013916, "TIME_S_1KI": 0.6367750096668247, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 297.23874763488766, "W": 28.161750792100978, "J_1KI": 18.184188647674517, "W_1KI": 1.722852734130734, "W_D": 9.808750792100977, "J_D": 103.52839291954034, "W_D_1KI": 0.6000704020617262, "J_D_1KI": 0.03671053481351562} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_0.0001.output b/pytorch/output_1core_before_test/altra_10_10_10_50000_0.0001.output deleted file mode 100644 index 7d4784e..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_0.0001.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 249980, 249983, - 249991]), - col_indices=tensor([ 36, 45823, 46465, ..., 37741, 45912, 48601]), - values=tensor([ 0.5460, 2.4548, 0.1718, ..., -0.5842, -0.3649, - 0.9708]), size=(50000, 50000), nnz=249991, - layout=torch.sparse_csr) -tensor([0.1356, 0.4896, 0.0726, ..., 0.8527, 0.5513, 0.2972]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249991 -Density: 9.99964e-05 -Time: 10.408724308013916 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_1e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_50000_1e-05.json deleted file mode 100644 index fff5855..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 38115, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.09147024154663, "TIME_S_1KI": 0.2647637476465074, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 236.10756771087648, "W": 23.137619574023116, "J_1KI": 6.194610198370103, "W_1KI": 0.6070476078715235, "W_D": 4.60861957402312, "J_D": 47.0286044182778, "W_D_1KI": 0.12091353991927377, "J_D_1KI": 0.0031723347742168115} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_1e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_50000_1e-05.output deleted file mode 100644 index 0a6dfd3..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_1e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 25000, 25000]), - col_indices=tensor([ 7386, 29462, 29552, ..., 29408, 22052, 28524]), - values=tensor([ 0.9699, 1.4627, 2.7280, ..., -1.3045, 0.9971, - 0.9145]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.1635, 0.8941, 0.9193, ..., 0.4092, 0.8845, 0.1384]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.09147024154663 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_2e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_50000_2e-05.json deleted file mode 100644 index 1933b53..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 29249, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 50000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.311580419540405, "TIME_S_1KI": 0.3525447167267396, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 258.66943813323974, "W": 24.446345713171578, "J_1KI": 8.843701943083174, "W_1KI": 0.8358010774102218, "W_D": 6.260345713171578, "J_D": 66.24139767742155, "W_D_1KI": 0.2140362307487975, "J_D_1KI": 0.007317728153058138} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_2e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_50000_2e-05.output deleted file mode 100644 index 85a7c96..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 49998, 49999, 50000]), - col_indices=tensor([14047, 39956, 44680, ..., 23928, 14234, 25155]), - values=tensor([-0.2408, -1.0192, -0.9186, ..., 0.7145, 0.3660, - -1.6825]), size=(50000, 50000), nnz=50000, - layout=torch.sparse_csr) -tensor([0.0334, 0.3993, 0.5209, ..., 0.6805, 0.8639, 0.4287]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 50000 -Density: 2e-05 -Time: 10.311580419540405 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_5e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_50000_5e-05.json deleted file mode 100644 index 92c429e..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 21180, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124996, "MATRIX_DENSITY": 4.99984e-05, "TIME_S": 10.37968921661377, "TIME_S_1KI": 0.49007031239913923, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 277.8537188720703, "W": 26.282372283195787, "J_1KI": 13.118683610579334, "W_1KI": 1.240905206949754, "W_D": 8.087372283195787, "J_D": 85.49861635684967, "W_D_1KI": 0.38184005114238845, "J_D_1KI": 0.018028331026552807} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_5e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_50000_5e-05.output deleted file mode 100644 index ca9b0e2..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_5e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 124990, 124992, - 124996]), - col_indices=tensor([37148, 868, 14393, ..., 11956, 13687, 17217]), - values=tensor([-0.7901, -0.0307, 0.6583, ..., 1.2664, -0.5294, - 0.2415]), size=(50000, 50000), nnz=124996, - layout=torch.sparse_csr) -tensor([0.6346, 0.7344, 0.7382, ..., 0.1031, 0.7761, 0.2680]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124996 -Density: 4.99984e-05 -Time: 10.37968921661377 seconds - diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_8e-05.json b/pytorch/output_1core_before_test/altra_10_10_10_50000_8e-05.json deleted file mode 100644 index e4f8ea1..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 17954, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 7.99972e-05, "TIME_S": 10.405897617340088, "TIME_S_1KI": 0.579586588912782, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 288.5746788787842, "W": 27.42722789039496, "J_1KI": 16.073002054070635, "W_1KI": 1.5276388487465167, "W_D": 9.17722789039496, "J_D": 96.55790231704715, "W_D_1KI": 0.5111522719391199, "J_D_1KI": 0.02847010537702573} diff --git a/pytorch/output_1core_before_test/altra_10_10_10_50000_8e-05.output b/pytorch/output_1core_before_test/altra_10_10_10_50000_8e-05.output deleted file mode 100644 index 3cb3ffb..0000000 --- a/pytorch/output_1core_before_test/altra_10_10_10_50000_8e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 199989, 199992, - 199993]), - col_indices=tensor([11520, 17771, 28351, ..., 14010, 24789, 25382]), - values=tensor([ 0.1757, 0.0573, -0.2332, ..., 0.2418, 0.8941, - 0.9165]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.9397, 0.4984, 0.8473, ..., 0.2235, 0.6946, 0.4850]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 10.405897617340088 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_0.0001.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_0.0001.json deleted file mode 100644 index 72e4c81..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 7224, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999961, "MATRIX_DENSITY": 9.99961e-05, "TIME_S": 10.391737222671509, "TIME_S_1KI": 1.4385018303808843, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 683.3481682777406, "W": 66.15, "J_1KI": 94.5941539697869, "W_1KI": 9.156976744186046, "W_D": 30.81575000000001, "J_D": 318.3353940529824, "W_D_1KI": 4.265746124031009, "J_D_1KI": 0.5904964180552338} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_0.0001.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_0.0001.output deleted file mode 100644 index 4693b81..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_0.0001.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 26, ..., 999940, 999950, - 999961]), - col_indices=tensor([11394, 34235, 37054, ..., 66681, 73720, 88333]), - values=tensor([-0.1030, 0.0523, 1.7276, ..., -0.0843, -1.1960, - 0.1651]), size=(100000, 100000), nnz=999961, - layout=torch.sparse_csr) -tensor([0.0552, 0.0885, 0.6701, ..., 0.2701, 0.0312, 0.3612]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999961 -Density: 9.99961e-05 -Time: 10.391737222671509 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_1e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_1e-05.json deleted file mode 100644 index 54a5e8a..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 15444, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.258945941925049, "TIME_S_1KI": 0.6642674140070609, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 655.1369225239754, "W": 63.730000000000004, "J_1KI": 42.42015815358556, "W_1KI": 4.126521626521626, "W_D": 28.90775, "J_D": 297.1682782377601, "W_D_1KI": 1.8717786842786843, "J_D_1KI": 0.12119779100483581} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_1e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_1e-05.output deleted file mode 100644 index 1571eae..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_1e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 99999, 100000, - 100000]), - col_indices=tensor([38240, 42310, 36136, ..., 17864, 20234, 87495]), - values=tensor([ 0.3816, -1.5588, -0.4494, ..., -0.4710, 0.7967, - 0.8525]), size=(100000, 100000), nnz=100000, - layout=torch.sparse_csr) -tensor([0.8033, 0.2555, 0.5482, ..., 0.8182, 0.5447, 0.0536]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 10.258945941925049 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_2e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_2e-05.json deleted file mode 100644 index 3ca0214..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 12867, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199999, "MATRIX_DENSITY": 1.99999e-05, "TIME_S": 10.453712463378906, "TIME_S_1KI": 0.8124436514633486, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 672.0596928977967, "W": 64.56, "J_1KI": 52.231265477407064, "W_1KI": 5.017486593611564, "W_D": 29.200499999999998, "J_D": 303.97272401583194, "W_D_1KI": 2.2694101189088363, "J_D_1KI": 0.17637445549924893} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_2e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_2e-05.output deleted file mode 100644 index 5050710..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_2e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 8, ..., 199994, 199994, - 199999]), - col_indices=tensor([45341, 97429, 15892, ..., 55888, 75567, 93358]), - values=tensor([-0.3367, 0.6609, 0.0778, ..., -0.6682, 0.4871, - 0.4955]), size=(100000, 100000), nnz=199999, - layout=torch.sparse_csr) -tensor([0.5736, 0.7557, 0.5783, ..., 0.2968, 0.9318, 0.2649]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199999 -Density: 1.99999e-05 -Time: 10.453712463378906 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_5e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_5e-05.json deleted file mode 100644 index f5bc8bc..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 9705, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499987, "MATRIX_DENSITY": 4.99987e-05, "TIME_S": 10.461008548736572, "TIME_S_1KI": 1.0778988715854274, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 680.3387736654282, "W": 65.31, "J_1KI": 70.1018829124604, "W_1KI": 6.72952086553323, "W_D": 30.586750000000002, "J_D": 318.6242839597464, "W_D_1KI": 3.151648634724369, "J_D_1KI": 0.32474483613852334} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_5e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_5e-05.output deleted file mode 100644 index 355d0b5..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_5e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 8, ..., 499980, 499984, - 499987]), - col_indices=tensor([33369, 53489, 54258, ..., 9707, 29472, 36584]), - values=tensor([ 0.8793, -0.5186, -0.8822, ..., -0.0127, -0.7208, - 0.7916]), size=(100000, 100000), nnz=499987, - layout=torch.sparse_csr) -tensor([0.8939, 0.0323, 0.6990, ..., 0.7330, 0.9554, 0.9620]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499987 -Density: 4.99987e-05 -Time: 10.461008548736572 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_8e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_8e-05.json deleted file mode 100644 index 179eb2a..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 7610, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799973, "MATRIX_DENSITY": 7.99973e-05, "TIME_S": 10.410067796707153, "TIME_S_1KI": 1.3679458339956836, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 683.4187345504761, "W": 66.0, "J_1KI": 89.8053527661598, "W_1KI": 8.672798948751643, "W_D": 31.27825, "J_D": 323.8809399083853, "W_D_1KI": 4.110151116951379, "J_D_1KI": 0.5400987013076713} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_8e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_8e-05.output deleted file mode 100644 index d648452..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_100000_8e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 22, ..., 799958, 799966, - 799973]), - col_indices=tensor([16358, 22024, 24798, ..., 41332, 74131, 83922]), - values=tensor([ 0.4447, 1.4577, 1.0781, ..., -0.2374, 0.5707, - -0.4063]), size=(100000, 100000), nnz=799973, - layout=torch.sparse_csr) -tensor([0.9625, 0.7543, 0.4294, ..., 0.2420, 0.7978, 0.4269]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799973 -Density: 7.99973e-05 -Time: 10.410067796707153 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_0.0001.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_0.0001.json deleted file mode 100644 index dda1322..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 374804, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.253702640533447, "TIME_S_1KI": 0.02735750589783846, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 676.8751910734177, "W": 64.98, "J_1KI": 1.8059444164774594, "W_1KI": 0.1733706150414617, "W_D": 30.122, "J_D": 313.7709219069481, "W_D_1KI": 0.08036733866234085, "J_D_1KI": 0.0002144249758869725} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_0.0001.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_0.0001.output deleted file mode 100644 index d876127..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_0.0001.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 10000, 10000]), - col_indices=tensor([7907, 913, 6343, ..., 9697, 9188, 1941]), - values=tensor([ 1.4897, -1.5385, -0.3081, ..., 0.4741, 0.1537, - 1.2085]), size=(10000, 10000), nnz=10000, - layout=torch.sparse_csr) -tensor([0.9532, 0.1366, 0.0841, ..., 0.0892, 0.6228, 0.5359]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 10000 -Density: 0.0001 -Time: 10.253702640533447 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_1e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_1e-05.json deleted file mode 100644 index 3b254e6..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 645847, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.55691385269165, "TIME_S_1KI": 0.016345843292129018, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.5892867422104, "W": 65.17, "J_1KI": 1.0429548898457537, "W_1KI": 0.10090625179028469, "W_D": 30.532749999999993, "J_D": 315.58283404600616, "W_D_1KI": 0.047275515718118985, "J_D_1KI": 7.319924954071008e-05} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_1e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_1e-05.output deleted file mode 100644 index 8fd4016..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_1e-05.output +++ /dev/null @@ -1,376 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 658, 5085, 5400, 1750, 9367, 3115, 3773, 5363, 946, - 7060, 2384, 8575, 3712, 1069, 1060, 5881, 5263, 322, - 5766, 7434, 3600, 3757, 6754, 5635, 813, 3639, 8495, - 2069, 8797, 2654, 8108, 4213, 1990, 2789, 3389, 7272, - 7581, 5296, 7640, 5588, 9232, 4192, 4108, 7342, 9503, - 6400, 6834, 1449, 5097, 611, 2658, 8035, 1300, 4040, - 6801, 7249, 1396, 9565, 7213, 8505, 5381, 4503, 9508, - 6607, 2183, 5832, 1379, 1224, 3772, 9898, 3123, 3729, - 995, 6140, 6786, 1132, 9565, 7799, 2269, 9961, 5060, - 34, 1043, 775, 2446, 6123, 2070, 1838, 123, 3120, - 8681, 2925, 8242, 9291, 3365, 6389, 9240, 996, 7312, - 5146, 8130, 7318, 2414, 2712, 2449, 3506, 4280, 9191, - 4377, 5729, 5738, 6008, 1707, 5553, 3341, 1830, 5880, - 8223, 3024, 1580, 7924, 6054, 3519, 5251, 2216, 2087, - 3312, 7678, 960, 7929, 5727, 2691, 5023, 9334, 523, - 1376, 2967, 7862, 5352, 8113, 9320, 8646, 3657, 4855, - 2016, 2530, 8784, 5422, 5936, 1848, 2253, 2532, 8402, - 398, 9482, 4533, 6480, 6472, 6473, 3888, 4754, 5033, - 5752, 7821, 9388, 9710, 4651, 2472, 7684, 523, 4759, - 6624, 7626, 6149, 5337, 7181, 4388, 8683, 2739, 4164, - 5672, 3537, 6465, 3117, 6523, 4665, 7613, 1085, 1927, - 7600, 7691, 4196, 6804, 8906, 3269, 2424, 5357, 9647, - 5773, 6914, 9514, 1162, 4797, 3803, 2405, 6411, 3596, - 5365, 2862, 2727, 2872, 8926, 5515, 3613, 7839, 5108, - 3999, 794, 3209, 7255, 2393, 2383, 6205, 7923, 2887, - 4890, 911, 3349, 27, 7220, 2401, 2075, 4324, 737, - 5197, 7147, 7574, 1628, 6128, 9149, 2809, 133, 6259, - 6747, 9274, 4828, 410, 5007, 2953, 426, 5589, 4895, - 3400, 5614, 2714, 8730, 219, 8035, 8260, 2814, 3307, - 6960, 5719, 7181, 3850, 4345, 6046, 8788, 8208, 5620, - 6884, 464, 4716, 6749, 5612, 7946, 8515, 4156, 9259, - 1043, 3848, 5993, 4307, 4783, 5985, 6030, 7095, 2985, - 3970, 9081, 8374, 7505, 2806, 1457, 4753, 499, 5181, - 7326, 8883, 1490, 4162, 6800, 6795, 3441, 1089, 7175, - 2445, 3748, 4365, 2691, 7256, 3689, 6099, 1293, 1898, - 6896, 5446, 2000, 5054, 4894, 6793, 9080, 5737, 5859, - 7738, 7731, 3528, 423, 6098, 5736, 5106, 19, 6131, - 8249, 2841, 7907, 9275, 6377, 8846, 686, 4170, 9705, - 8756, 1468, 9536, 6823, 7074, 9237, 7184, 836, 6872, - 9651, 1745, 7247, 5060, 386, 9373, 483, 5196, 901, - 7658, 8648, 9576, 9710, 577, 990, 877, 4189, 396, - 2141, 2832, 8017, 9110, 2108, 7662, 5155, 3988, 6759, - 2259, 5607, 3394, 4508, 8417, 1543, 4518, 5034, 89, - 6863, 2807, 9082, 313, 7711, 9435, 2977, 6848, 471, - 9640, 3349, 4067, 8107, 6809, 545, 2777, 8921, 8474, - 9019, 9294, 9748, 7478, 4620, 5312, 7563, 3657, 4875, - 2165, 8785, 1461, 1568, 2352, 3444, 3795, 4369, 2836, - 9649, 3498, 3734, 570, 6208, 3662, 7949, 3212, 5233, - 5829, 2740, 1472, 3888, 502, 7108, 8478, 6745, 8534, - 6437, 1817, 5344, 6711, 6102, 4527, 7687, 1412, 6725, - 7929, 5569, 4312, 9113, 5598, 1882, 5696, 5797, 631, - 868, 6834, 3451, 1025, 8828, 5791, 221, 526, 4748, - 5137, 9529, 1416, 6908, 1830, 9100, 7579, 1246, 1739, - 2593, 6625, 153, 1006, 5025, 9128, 5813, 9519, 9185, - 5475, 5537, 1829, 7076, 8034, 9313, 3654, 1983, 4489, - 2932, 4345, 7319, 7710, 9963, 9469, 5270, 3224, 3242, - 2070, 914, 5146, 3513, 3039, 331, 3787, 7446, 6679, - 7128, 3155, 7266, 7245, 6060, 3449, 4791, 2402, 8252, - 1025, 8713, 9215, 1136, 2112, 6634, 6989, 3427, 9931, - 2577, 5456, 3974, 3666, 1469, 8490, 3707, 5152, 1029, - 7608, 6983, 1827, 1249, 5180, 6469, 6307, 6259, 9875, - 7848, 2193, 7307, 6606, 4122, 3123, 3292, 2289, 
7252, - 3414, 5193, 3814, 4982, 226, 9673, 8811, 5216, 9573, - 3476, 9649, 6775, 5463, 353, 2355, 9750, 4881, 4938, - 8366, 1968, 2676, 539, 2767, 8746, 6517, 4404, 3278, - 4416, 45, 7737, 3895, 2218, 6357, 8600, 100, 1895, - 7321, 508, 7562, 8358, 1361, 8591, 8180, 5873, 2063, - 7292, 8818, 4012, 3858, 6852, 705, 4590, 823, 1329, - 6831, 679, 4599, 9068, 2899, 9631, 6087, 7245, 9039, - 2946, 8552, 3218, 7616, 9247, 3460, 9043, 6117, 7302, - 8180, 1341, 3163, 5198, 9983, 8626, 9796, 7371, 6926, - 7936, 2804, 8197, 7132, 7990, 1976, 4197, 6906, 6153, - 792, 8200, 249, 1100, 7501, 2271, 1743, 7768, 6862, - 602, 791, 7671, 5528, 9588, 5388, 8850, 1341, 8007, - 9057, 3055, 9700, 206, 2832, 1148, 9432, 2959, 5369, - 5569, 8713, 5254, 1222, 432, 8726, 6010, 2985, 2487, - 8643, 1208, 5283, 1597, 5461, 4943, 1085, 9074, 7589, - 6931, 976, 4552, 8284, 7108, 9318, 1759, 2979, 9341, - 1509, 7888, 9951, 4957, 4804, 7238, 4712, 6760, 6125, - 7907, 1390, 490, 6146, 3680, 67, 1360, 7201, 6148, - 91, 4783, 1717, 7680, 6500, 2759, 6488, 2917, 6456, - 9150, 124, 7655, 9399, 8702, 8576, 4182, 6527, 6640, - 9295, 9044, 6650, 7282, 2520, 4172, 5904, 5466, 1946, - 9765, 8308, 2218, 895, 1037, 3568, 110, 9271, 4107, - 319, 2165, 3009, 7702, 6240, 5588, 3723, 3851, 2002, - 3049, 6823, 3653, 2801, 6082, 5142, 2659, 7143, 3614, - 3346, 7848, 5179, 3701, 6125, 773, 9917, 4350, 2823, - 9954, 9153, 5232, 3359, 976, 6936, 5069, 7750, 9000, - 8006, 8675, 6673, 6113, 8787, 5318, 7908, 9949, 8191, - 7412, 7913, 9705, 7693, 1014, 7719, 6492, 597, 3555, - 8749, 3092, 4834, 2321, 5628, 3798, 7083, 9640, 2549, - 417, 9423, 2685, 2399, 2409, 4934, 595, 4908, 5419, - 5805, 7474, 5810, 8434, 6063, 9035, 668, 4575, 3949, - 4234, 1928, 7896, 3199, 3282, 7758, 8229, 801, 2416, - 8093, 7178, 7943, 2385, 5037, 675, 3449, 2753, 8050, - 4322, 2836, 719, 6479, 4223, 3368, 6248, 2630, 4826, - 9800, 4386, 4794, 8174, 9414, 7748, 7760, 2788, 8619, - 2795, 5972, 8190, 3035, 6938, 9862, 1238, 4606, 1658, - 5894, 2109, 1950, 5906, 4673, 859, 7851, 6527, 2869, - 1326, 482, 7427, 6562, 6760, 314, 2122, 8046, 7828, - 9529, 2450, 9454, 874, 2574, 4598, 8477, 7287, 5004, - 3450, 9800, 4891, 8694, 3218, 1636, 6437, 7930, 9209, - 6972, 5146, 1164, 2426, 8614, 3118, 8082, 7678, 509, - 2276, 3127, 9146, 1027, 2073, 3592, 8364, 3864, 6947, - 9984, 7601, 9184, 9618, 176, 5415, 589, 9486, 4678, - 9447, 7287, 6643, 5093, 4074, 1969, 4050, 3260, 7250, - 8170, 8020, 273, 1677, 6478, 3769, 7474, 4814, 5262, - 6916, 4827, 9737, 3879, 3560, 828, 2843, 2277, 251, - 272, 3217, 9064, 5693, 3931, 2963, 2532, 2459, 6391, - 2553, 1260, 4986, 6781, 8034, 1005, 2852, 4671, 3801, - 2296, 3396, 9162, 1814, 4536, 1669, 8677, 2988, 6305, - 817]), - values=tensor([-1.1101e+00, 3.0633e-01, 1.1078e+00, -6.9790e-01, - -4.4491e-01, -1.5632e+00, -8.6309e-01, -7.4224e-02, - -8.3532e-01, 1.8072e-01, -1.1300e+00, 6.4618e-01, - -3.6050e-01, 1.9692e+00, -3.1057e-01, 2.2580e-01, - 4.3246e-01, -1.8039e+00, -9.0683e-01, 1.1888e+00, - -1.0522e+00, 1.9999e+00, -4.0159e-01, 1.0670e+00, - 4.2154e-01, -7.6806e-01, -1.4815e+00, 2.4191e+00, - 2.8794e-01, -1.1054e+00, -1.3622e+00, 1.0197e+00, - 1.4237e+00, -9.8478e-01, 1.8460e-01, 1.0144e+00, - -1.2621e-01, -7.2858e-01, -8.3001e-02, 1.2743e+00, - -1.6888e+00, -3.3885e-01, 1.4860e+00, 1.7916e+00, - -8.0670e-02, 1.6117e+00, 1.2010e+00, -2.0557e-01, - 1.3862e-01, 6.7139e-03, -1.6822e-01, 1.3083e+00, - 1.2182e+00, -7.5700e-02, -6.0411e-01, -1.6342e+00, - 3.6152e-01, -4.3839e-01, 1.3985e-01, 9.3304e-01, - -3.2117e-01, -1.6663e+00, 1.5405e+00, 
-1.4186e+00, - -5.4272e-01, -2.3433e-01, 6.1664e-01, 1.1291e+00, - -3.5028e-01, -1.1777e+00, -2.8878e-01, -6.7576e-01, - 8.5777e-01, 3.3195e-02, 3.4202e-01, 1.9557e+00, - -2.8681e-01, -4.0757e-01, 8.6801e-01, -1.1406e+00, - 5.2232e-01, 5.8082e-01, -1.0019e-01, -4.7086e-01, - 1.3153e+00, -2.5996e+00, -2.4177e+00, -1.3512e-01, - -1.3412e+00, -1.9384e-01, -2.5537e+00, 8.1877e-01, - 1.2961e+00, -1.3440e+00, -8.6302e-02, -5.1707e-01, - -8.4898e-01, -1.3612e+00, 1.0507e+00, -7.2639e-02, - -9.4044e-02, 1.9989e-01, -3.1055e-01, -4.5701e-01, - 1.2299e+00, 1.2220e+00, -1.7957e+00, -9.8279e-01, - 1.2410e+00, -1.9697e-01, 9.3087e-01, 2.5291e-01, - -3.0749e-01, -1.0041e+00, -3.7751e-01, -1.2605e+00, - -9.2966e-01, -1.4022e+00, -3.0375e-01, -4.4995e-02, - 1.2199e+00, -8.7584e-01, -6.9101e-01, -1.2119e+00, - -2.7304e-01, -1.1502e+00, 6.2733e-01, 5.3783e-01, - 8.8895e-01, 2.2571e-01, -3.8331e-03, -1.0010e+00, - -2.7722e-01, -1.3517e-01, 1.3492e+00, -5.4185e-01, - -2.9196e-02, -1.8725e-01, 6.0237e-01, -4.9842e-03, - 3.7575e-01, 1.7022e+00, -3.9786e-01, 6.6903e-01, - -5.9174e-01, 1.9763e-01, 5.2993e-01, 3.2086e-01, - 6.2695e-01, -6.3719e-01, 6.8584e-01, 2.4598e-02, - -3.7492e-01, 4.9750e-01, -4.5188e-01, 1.1244e+00, - 8.2121e-01, 7.8224e-01, 1.4531e+00, -3.8115e-01, - 9.9784e-01, 6.0671e-01, 2.3657e+00, 8.6611e-01, - 2.9388e-01, 9.7872e-01, 3.5482e-01, 2.5645e-02, - -2.3585e-01, 8.2998e-01, 9.5246e-01, 1.0043e+00, - -3.6132e-01, 4.4223e-02, 4.9203e-01, -8.2406e-01, - 1.9468e+00, -4.5481e-01, -6.9257e-01, 1.6778e+00, - -8.4002e-01, 1.3942e-01, -1.1854e+00, -1.9812e-01, - -6.2034e-01, -1.9289e+00, 1.6543e+00, -7.4901e-01, - 1.2020e+00, 4.4489e-01, 4.0680e-01, -1.2729e+00, - 4.4908e-01, 5.5719e-02, -3.8409e-01, -2.6272e-01, - -2.9870e-02, 8.4146e-01, -1.9631e+00, 5.0530e-01, - 2.1186e-01, -1.2994e+00, 8.2884e-01, 1.1651e+00, - 6.3637e-01, 1.2109e-01, -1.0037e+00, 2.9119e-01, - 1.9717e-01, 9.8994e-01, -9.9557e-01, -1.9863e-01, - 2.0257e-01, -3.8923e-01, -8.4524e-01, 9.5141e-01, - 3.7623e-01, -2.2325e-01, -1.8083e+00, 1.0991e-01, - -9.0641e-01, 2.0688e+00, 1.4178e+00, 3.1012e-01, - 7.9233e-01, -8.4590e-01, -5.6846e-01, 2.2360e+00, - -5.1472e-02, 4.2616e-01, -1.2356e+00, 4.7289e-02, - 6.6122e-01, 1.6173e+00, 1.3212e+00, 7.0257e-01, - 3.8464e-01, 1.1072e+00, -1.3051e-01, 1.9886e+00, - -1.3245e-01, 1.2420e+00, 6.4742e-01, -2.2014e-01, - 7.3139e-01, -5.0874e-01, -2.4830e+00, -2.0007e+00, - -8.6082e-01, 3.0811e-01, 1.4139e+00, -7.2951e-01, - -7.8390e-01, -3.2628e-01, 5.4602e-01, 1.7243e+00, - -1.8951e+00, 4.2836e-01, -1.5769e+00, -1.0607e+00, - -1.6718e-01, -4.7869e-02, -8.8981e-01, -1.2220e+00, - 1.5377e-01, -1.2579e-01, -1.5300e+00, 3.7642e-01, - -9.0735e-01, -1.0045e+00, -8.4508e-01, 5.9071e-01, - 5.6875e-01, -7.3547e-01, -1.3744e+00, -4.1511e-01, - 3.2743e+00, 1.1751e+00, 3.6610e-01, 7.8297e-01, - -3.8187e-01, -4.1496e-01, -2.8833e-01, 3.4046e-01, - 3.0074e-01, -6.0557e-01, 1.9626e-01, -7.6253e-01, - 1.9814e+00, -1.5145e+00, 3.3779e-01, -1.7072e+00, - 7.6451e-01, -1.3515e+00, -2.1471e+00, -5.6433e-01, - 2.0077e-01, -9.2786e-01, -1.0530e+00, 9.5779e-01, - -5.2643e-02, 2.4042e-01, -2.1610e-01, 1.0033e+00, - -8.6542e-02, -1.6929e+00, 9.5664e-01, 8.9946e-01, - 6.4726e-01, 3.4099e-01, -1.4117e+00, -1.0214e+00, - 1.1858e+00, 3.4372e-01, -2.4910e-01, -9.1282e-01, - -1.2469e+00, -1.6411e+00, -4.1444e-01, -3.5274e-01, - 8.8071e-01, 1.5452e-01, 9.1064e-01, 2.7011e-01, - 8.1228e-01, -5.4686e-01, -9.0318e-01, 1.0659e+00, - 1.0078e+00, -1.5743e+00, -1.9022e-01, -5.6275e-01, - 8.6418e-01, 1.8274e-01, -1.2418e-01, -6.6055e-01, 
- -1.3332e+00, -5.9012e-01, 9.3504e-01, 4.5504e-01, - 1.9550e+00, -9.0783e-01, -8.6953e-02, -1.1860e-01, - -1.5204e-01, 1.9387e-01, -1.9887e+00, -3.6803e-01, - -5.1950e-01, -1.4356e+00, -1.2649e-01, 8.2256e-01, - 6.0702e-01, 6.0080e-01, 1.0603e+00, -6.9410e-01, - 1.0359e+00, -6.3054e-02, -7.8894e-01, -2.0135e-01, - -2.0324e+00, -2.2405e+00, -1.0969e+00, 3.8275e-01, - 2.7963e-01, -3.5224e-01, -1.9494e+00, 3.1860e-01, - 1.0007e-01, 2.2995e+00, 7.5008e-01, -1.9524e-02, - -5.1374e-01, 2.0433e+00, 9.7380e-01, -7.2325e-02, - 1.3023e-01, -1.0526e+00, 6.4749e-01, 1.0045e-01, - 1.5622e+00, 1.0251e+00, -1.5920e-01, -4.7846e-01, - -1.9719e+00, 1.5229e+00, -1.2428e+00, 5.8854e-01, - 5.2239e-01, 2.1113e-01, -8.8288e-01, -2.3680e-01, - 6.2525e-01, 4.5351e-01, 4.3144e-03, -1.3203e-01, - -2.4933e-01, 4.7427e-01, 8.5376e-01, -6.2237e-01, - 1.2002e+00, -1.8151e+00, 1.9647e+00, 5.6878e-01, - -1.1499e+00, 2.4919e-01, 2.1148e-01, 7.5684e-01, - 7.3061e-01, -1.4165e+00, 1.2722e+00, -8.6737e-01, - 1.7023e+00, -7.8548e-01, -2.4336e-01, 1.3139e+00, - -2.9009e-03, -5.5892e-01, 6.1137e-01, -1.4268e+00, - 2.1152e+00, -2.6966e-01, -1.8218e-01, -2.0308e+00, - -6.8449e-01, -1.5520e+00, -9.5974e-01, -3.0451e-01, - -4.6271e-01, 2.2035e-01, -3.1573e-01, 6.3969e-01, - 4.3839e-01, 1.8468e+00, 4.7894e-01, -2.2052e+00, - -4.5386e-01, 1.1816e+00, 2.5451e-01, -2.5339e-02, - 2.0513e-01, 2.9905e+00, -4.6901e-01, 4.2322e-01, - -1.5722e+00, 6.7107e-01, -1.0722e+00, 9.9840e-01, - 8.9106e-01, 1.4954e+00, 4.1568e-02, 6.9454e-01, - 6.1753e-01, 9.5793e-01, -1.1236e+00, -1.7969e+00, - -2.2964e+00, 1.5782e+00, 2.2913e+00, 6.3989e-01, - 7.2726e-02, 5.2010e-01, -1.1253e+00, 1.3088e+00, - 1.6874e-01, 4.9868e-01, -7.7034e-01, 1.1711e+00, - -2.1818e-01, 2.1663e-01, -1.3206e-01, -3.2760e-01, - 9.9035e-02, -1.2327e+00, -3.0801e-01, -1.8339e-01, - -4.5552e-01, 8.4937e-01, -1.0021e+00, -1.6153e+00, - -2.9234e-01, -1.4844e+00, 2.8656e-02, 7.3140e-01, - -1.0538e+00, -9.1988e-01, -3.1755e-01, 1.8249e-01, - 2.3980e-02, -7.1233e-02, 1.6406e+00, -7.6251e-01, - 1.0104e+00, 7.1827e-01, 1.9132e+00, 1.0323e-01, - 1.2683e+00, -7.4135e-01, -1.7863e+00, 1.0542e+00, - -1.7399e+00, 1.5927e+00, 3.5366e-02, 2.2407e+00, - 1.0217e+00, -7.8771e-01, -7.2670e-01, -2.4228e+00, - 8.0455e-01, 9.6663e-01, -7.2882e-01, 1.5787e+00, - 1.7877e+00, 6.1887e-01, 8.7700e-02, 9.6863e-01, - -1.3768e+00, 5.5425e-01, -1.9948e+00, 1.7206e+00, - 1.0497e+00, 7.9046e-02, 1.3227e+00, -1.1295e-02, - -5.7344e-01, -6.0092e-01, -1.2697e-01, 1.5765e+00, - -1.3238e+00, -8.6421e-01, 1.1792e+00, -1.5885e+00, - 8.2128e-01, -9.3574e-01, 1.6104e+00, 1.0154e+00, - 2.5791e+00, -6.6624e-02, 9.8975e-01, -2.5865e-01, - -7.4949e-01, 1.0262e-01, 7.2524e-01, 7.2704e-01, - -5.0348e-01, -7.5342e-03, -8.9516e-01, 6.3419e-02, - 1.1720e+00, 1.6602e+00, -8.9392e-01, -9.3062e-01, - 4.7539e-01, 6.3553e-01, -2.1609e-01, -1.2500e+00, - 1.1840e+00, 8.8650e-01, 1.9297e-02, 1.0488e+00, - -8.3000e-01, 1.4083e-01, -8.2655e-01, -8.9094e-01, - -4.0533e-01, 4.9665e-01, -1.4626e+00, 1.4442e-01, - -9.4797e-01, 4.7958e-01, 2.4741e-01, -4.7970e-01, - -1.1963e-01, -1.4950e+00, 6.7523e-02, -1.3952e+00, - -1.6777e+00, -2.9169e-01, -4.5961e-01, 8.1465e-01, - -1.5504e+00, 8.7591e-02, 3.4944e-01, -8.2243e-01, - -1.5846e+00, -4.0457e-01, 1.3515e-01, -1.2188e+00, - -6.1643e-01, -1.3648e-01, -1.3261e+00, -4.4080e-01, - 6.6744e-03, 9.4432e-01, 5.1276e-01, 2.1639e-01, - -2.1014e-02, 6.8714e-02, 1.0089e-01, 5.1488e-01, - 1.3674e+00, -5.0616e-01, -1.9159e-01, -6.3132e-01, - 2.5726e-01, -4.9345e-01, 1.0432e+00, 6.4481e-01, - 3.5208e-01, 
8.7194e-01, 7.4212e-01, 4.7158e-01, - 7.9093e-01, 1.0921e+00, 6.5971e-01, 1.9012e+00, - 9.0687e-02, -1.2699e+00, -4.0293e-01, -2.9606e-01, - -4.9594e-02, 5.6358e-01, -2.6688e-01, 2.1744e-01, - -9.9318e-01, 2.2010e+00, 1.1679e+00, -1.2500e+00, - -1.2004e+00, -2.2378e+00, -1.5538e+00, 9.3212e-01, - -1.0745e-01, -1.1977e+00, 3.5637e-01, -1.5744e+00, - 1.1292e+00, 7.8477e-01, 1.1032e+00, 7.0366e-01, - 9.6198e-01, 1.5110e+00, -4.2784e-01, -1.1140e+00, - 9.6417e-01, -5.3590e-01, 4.0630e-02, 4.9276e-01, - -1.9879e-01, 9.6080e-01, 3.7759e-01, -8.3082e-01, - 3.0512e-01, 5.2086e-01, -2.3916e-01, -5.2063e-01, - 1.3061e+00, -8.3874e-01, 1.1083e+00, -9.7442e-01, - 1.7895e+00, -1.1733e+00, 1.3481e-01, 1.3544e+00, - -1.9054e+00, -3.8114e-01, 4.9589e-02, -5.2211e-01, - 9.5972e-01, -1.6596e+00, 3.9037e-02, 1.0858e+00, - 9.8406e-01, -5.0575e-01, -8.0033e-01, -6.4918e-01, - 1.5455e-01, 1.1112e+00, -5.7494e-02, 6.6258e-01, - 1.5814e-01, 3.7104e-01, 1.1440e+00, 2.8607e-01, - 1.6218e+00, 5.7906e-01, -1.7273e+00, 9.7640e-01, - -7.3580e-01, -8.5307e-01, 8.0178e-02, 2.0017e-01, - -5.1231e-01, 9.2380e-01, -2.0326e+00, 8.1669e-01, - -1.8486e+00, 3.9404e-01, 5.0657e-02, 8.7639e-01, - -8.7927e-01, -8.7029e-02, 6.4627e-01, -1.3844e-01, - 7.4230e-01, 9.4586e-01, -3.2407e-01, 3.3061e-01, - 2.8879e-01, -3.2935e-01, 5.3343e-01, 6.7137e-01, - 4.6274e-02, 7.8894e-01, -1.0804e+00, 1.8410e+00, - -3.7385e-01, -6.4220e-01, -1.3178e+00, -8.6118e-01, - 7.9919e-01, -6.2513e-01, -2.4281e-01, 9.0632e-01, - 1.0062e+00, -6.0323e-01, 1.0595e+00, 8.3073e-01, - 2.2926e-01, -7.1525e-01, 4.9692e-01, -3.4215e-01, - -1.0983e+00, -1.3816e+00, 1.5023e+00, -8.0958e-01, - -4.1775e-01, 4.1237e-01, 2.2070e-01, -5.4385e-01, - -8.4414e-01, 3.6103e-01, 2.9414e-01, 3.9339e-01, - 2.5215e-01, 9.4499e-01, -4.1586e-01, -2.8105e-01, - 4.4691e-01, -1.0939e+00, -1.1174e+00, -8.8043e-02, - 2.3152e+00, -4.2413e-01, -3.3629e-01, 5.0607e-02, - 1.2786e+00, 1.0187e+00, 4.7689e-01, -1.2020e+00, - 9.2624e-01, -4.7086e-01, -1.9326e-01, -8.9418e-02, - -1.4110e+00, -7.1176e-01, 1.0884e+00, 8.0859e-01, - -3.2440e-01, 1.1067e+00, -1.1770e-01, -1.7388e-01, - 3.8087e-01, 9.2122e-01, 1.0352e+00, 9.3617e-01, - -7.8908e-01, -1.7249e+00, -2.5452e-01, 7.8519e-01, - 7.9503e-01, -7.8532e-01, 7.7537e-03, 2.4897e-01, - 4.1586e-01, -4.7472e-01, 2.7599e-01, 6.8624e-01, - 7.3890e-01, 2.3200e+00, 8.7931e-01, 9.6184e-01, - -8.9754e-01, 4.2017e-01, -3.3134e-01, 1.8952e-01, - -4.7846e-01, -1.2996e+00, -4.9127e-01, 1.9463e-01, - 1.0143e-03, 1.5636e+00, -9.9079e-01, 1.7624e+00, - -3.6375e-02, -7.9053e-01, -4.4883e-01, 1.2157e+00, - -2.6231e-02, -3.4524e-01, -2.3478e-01, -4.1122e-01, - 1.6644e+00, 7.9206e-01, -1.4457e+00, -2.3074e+00, - -5.5168e-01, 6.9523e-01, -7.2315e-02, -2.8139e-01, - 4.6702e-01, 2.4078e+00, 1.0358e+00, 2.3683e-01, - 2.5336e-01, -3.1101e-01, 4.9968e-01, -7.4670e-01, - 1.8358e-01, -9.6935e-01, 7.9894e-01, -1.6002e+00, - 1.3370e+00, 1.4723e+00, -2.3582e-01, 1.0230e+00, - -7.3251e-01, -1.2369e+00, -1.3749e-01, 8.2332e-01, - -2.8971e-01, 8.0377e-01, -9.5670e-01, 8.9885e-01, - 9.9951e-01, 4.5254e-01, -1.0177e+00, 8.2800e-01, - -5.6214e-01, -8.8724e-01, 1.1048e+00, 9.8448e-01, - 5.5925e-01, 6.7611e-01, -5.2145e-01, 1.8351e-01, - -2.3523e-01, 4.3155e-01, 1.0193e+00, 2.3610e-01, - -4.6260e-01, 3.3762e-01, 1.1612e-01, -9.8299e-02, - 2.1743e+00, 2.6099e-01, -4.8127e-01, 3.8411e-01, - 1.2142e+00, 5.2875e-01, 1.3551e+00, -2.0640e-01, - 8.3411e-01, 4.6170e-01, 1.0470e+00, 5.7198e-01, - 1.2615e+00, 1.0583e+00, 8.1686e-01, -9.5442e-01, - 7.4199e-01, -1.0609e+00, -8.2496e-01, 3.7695e-01, 
- 7.5155e-02, -1.0779e-01, 2.8829e-02, 1.2611e+00, - 6.3854e-01, 2.5892e-01, 4.0395e-01, 1.2237e+00, - -8.1136e-01, -1.3650e-02, -5.2556e-01, 1.6708e+00, - -9.9370e-01, 1.5559e+00, 1.1607e+00, 7.6918e-01, - -1.0928e+00, -1.0364e+00, 4.5910e-01, 1.4694e-01, - -2.1708e-02, 1.5726e+00, -8.8808e-01, 6.9904e-01, - -1.0295e+00, 9.2627e-02, 3.5812e-01, 4.5086e-01, - 1.1136e+00, -2.1554e-01, 3.1405e-01, 4.4729e-02, - -1.6017e+00, -1.0030e+00, -1.7926e+00, -1.3471e+00, - 8.3406e-01, -2.5401e-01, 6.0159e-01, -1.1012e+00, - -9.7848e-02, 1.6516e-01, -1.9635e+00, 1.8386e+00, - -3.7695e-01, 1.3790e+00, -1.0959e+00, -3.3286e-01, - 4.5961e-02, 2.9561e-01, -1.7780e+00, 8.1762e-01, - -1.6859e+00, -2.1618e-01, 4.8435e-01, 4.9063e-01, - -2.8747e-01, -3.4936e-01, 4.6109e-01, 6.9496e-01, - 1.1330e-01, 7.5762e-02, 2.9081e-02, 2.8333e-01, - -1.3262e+00, -9.4245e-01, 6.1664e-01, -1.1768e+00, - 1.4389e+00, -3.6166e-01, -3.2900e-01, 4.5601e-02, - 8.0823e-01, 1.3165e+00, 5.1738e-01, -6.1047e-01, - 1.0479e+00, -8.6655e-01, 3.2544e-02, 1.4840e+00, - 6.5582e-01, -9.9131e-01, 7.8099e-01, -7.3803e-02, - -1.1795e+00, 3.9588e-01, 7.9163e-02, -1.4995e+00, - 6.8484e-01, 3.5421e-01, 2.1919e+00, 5.5206e-01, - -5.2023e-02, -7.4985e-01, -1.2236e+00, 2.5959e-01, - 7.7205e-01, 2.2405e-01, -5.3099e-02, 7.0039e-01, - 2.4703e-01, 5.1276e-01, -7.9451e-01, 1.9195e+00, - -6.4991e-01, 7.7349e-01, 2.1384e+00, -8.7642e-01, - -1.6751e+00, -5.1041e-01, -7.0769e-01, 1.4207e-01, - 1.1558e+00, -1.5498e+00, 4.1444e-01, 1.9956e-02]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3258, 0.6879, 0.6589, ..., 0.7533, 0.8477, 0.6133]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.55691385269165 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_2e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_2e-05.json deleted file mode 100644 index f0a893e..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 615380, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.506898164749146, "TIME_S_1KI": 0.017073837571499148, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 690.4202508544922, "W": 65.28, "J_1KI": 1.1219413221984662, "W_1KI": 0.10608079560596705, "W_D": 30.163249999999998, "J_D": 319.01529766523834, "W_D_1KI": 0.04901564886736651, "J_D_1KI": 7.965102679217151e-05} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_2e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_2e-05.output deleted file mode 100644 index 2a20241..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([ 456, 5195, 2467, ..., 4369, 8138, 884]), - values=tensor([ 1.2223, 2.2517, 0.2703, ..., 1.7456, 0.6802, - -0.3801]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.6497, 0.2929, 0.7805, ..., 0.4581, 0.7998, 0.8461]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.506898164749146 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_5e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_5e-05.json deleted file mode 100644 index a4621ab..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 480724, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.461936235427856, "TIME_S_1KI": 0.0217628748209531, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 698.7221143078804, "W": 65.27, "J_1KI": 1.4534787410403482, "W_1KI": 0.13577437365307327, "W_D": 30.49349999999999, "J_D": 326.43607771790016, "W_D_1KI": 0.06343244772468191, "J_D_1KI": 0.00013195190530258924} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_5e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_5e-05.output deleted file mode 100644 index 3e790ff..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_5e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 5000, 5000, 5000]), - col_indices=tensor([3873, 7060, 5337, ..., 1746, 4350, 923]), - values=tensor([-2.1665, -0.1151, -1.2526, ..., -1.4332, 1.7008, - 1.2042]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.7096, 0.7801, 0.1155, ..., 0.1339, 0.9153, 0.1921]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.461936235427856 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_8e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_8e-05.json deleted file mode 100644 index ec01f5f..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 400654, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 10.416202068328857, "TIME_S_1KI": 0.025997998443367237, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 692.797785410881, "W": 65.53, "J_1KI": 1.7291672750325242, "W_1KI": 0.16355758335122075, "W_D": 29.424750000000003, "J_D": 311.08502420675757, "W_D_1KI": 0.07344179766082456, "J_D_1KI": 0.00018330479081907222} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_8e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_8e-05.output deleted file mode 100644 index 5ad416c..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_10000_8e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 8000, 8000, 8000]), - col_indices=tensor([6974, 9206, 9380, ..., 4650, 3402, 4596]), - values=tensor([-0.5891, -0.1574, -1.5644, ..., 1.8363, -0.1227, - 1.9639]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.4384, 0.6609, 0.9442, ..., 0.0845, 0.4427, 0.0852]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 10.416202068328857 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_0.0001.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_0.0001.json deleted file mode 100644 index 3f952ea..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3572, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249898, "MATRIX_DENSITY": 9.999546666666666e-05, "TIME_S": 10.584399223327637, "TIME_S_1KI": 2.9631576773033697, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 725.7578255653382, "W": 69.42, "J_1KI": 203.17968240910923, "W_1KI": 19.434490481522957, "W_D": 34.61050000000001, "J_D": 361.83868080854427, "W_D_1KI": 9.689389697648378, "J_D_1KI": 2.712595100125526} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_0.0001.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_0.0001.output deleted file mode 100644 index edb034a..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_0.0001.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 25, ..., 2249863, - 2249879, 2249898]), - col_indices=tensor([ 2589, 10993, 22053, ..., 117927, 120962, - 137342]), - values=tensor([ 1.5727, 1.2288, 0.3898, ..., -0.3891, 1.5743, - 0.8407]), size=(150000, 150000), nnz=2249898, - layout=torch.sparse_csr) -tensor([0.1316, 0.7619, 0.4224, ..., 0.7069, 0.2314, 0.7800]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249898 -Density: 9.999546666666666e-05 -Time: 10.584399223327637 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_1e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_1e-05.json deleted file mode 100644 index 10e37a6..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 9191, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224995, "MATRIX_DENSITY": 9.999777777777778e-06, "TIME_S": 10.396477222442627, "TIME_S_1KI": 1.1311584400438066, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 669.4190217018128, "W": 64.26, "J_1KI": 72.8341879775664, "W_1KI": 6.991622239146992, "W_D": 29.523500000000006, "J_D": 307.55668358564384, "W_D_1KI": 3.212218474594713, "J_D_1KI": 0.34949608036064767} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_1e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_1e-05.output deleted file mode 100644 index 387a99f..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_1e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 224990, 224993, - 224995]), - col_indices=tensor([ 52836, 29680, 11077, ..., 89106, 36976, - 133647]), - values=tensor([-1.3706, -0.9801, -0.1720, ..., 2.3561, 0.0103, - 0.2901]), size=(150000, 150000), nnz=224995, - layout=torch.sparse_csr) -tensor([0.3110, 0.9801, 0.4482, ..., 0.1481, 0.2196, 0.8258]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224995 -Density: 9.999777777777778e-06 -Time: 10.396477222442627 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_2e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_2e-05.json deleted file mode 100644 index 5adee4e..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6914, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449989, "MATRIX_DENSITY": 1.9999511111111113e-05, "TIME_S": 10.456040382385254, "TIME_S_1KI": 1.5122997371109712, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 677.801438331604, "W": 64.97, "J_1KI": 98.03318460104195, "W_1KI": 9.396875903962973, "W_D": 30.232750000000003, "J_D": 315.40405471324925, "W_D_1KI": 4.372685854787389, "J_D_1KI": 0.6324393773195529} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_2e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_2e-05.output deleted file mode 100644 index d151f80..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_2e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 9, ..., 449987, 449987, - 449989]), - col_indices=tensor([133485, 140828, 2305, ..., 119888, 4793, - 24733]), - values=tensor([ 0.0145, -0.4665, 2.8914, ..., -0.2910, -0.8625, - 0.6344]), size=(150000, 150000), nnz=449989, - layout=torch.sparse_csr) -tensor([0.5460, 0.1498, 0.1000, ..., 0.7732, 0.7955, 0.4525]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449989 -Density: 1.9999511111111113e-05 -Time: 10.456040382385254 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_5e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_5e-05.json deleted file mode 100644 index 167738e..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 5066, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124974, "MATRIX_DENSITY": 4.999884444444444e-05, "TIME_S": 10.351625680923462, "TIME_S_1KI": 2.0433528781925507, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 681.9561458969116, "W": 65.96, "J_1KI": 134.614320153358, "W_1KI": 13.020134228187917, "W_D": 30.700249999999997, "J_D": 317.40788611388206, "W_D_1KI": 6.060057244374259, "J_D_1KI": 1.1962213273537818} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_5e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_5e-05.output deleted file mode 100644 index 94115be..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_5e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 14, ..., 1124964, - 1124971, 1124974]), - col_indices=tensor([ 3366, 3847, 5978, ..., 23715, 48535, - 121237]), - values=tensor([ 0.6516, 0.9593, -0.5601, ..., -1.5166, 0.0467, - 0.9951]), size=(150000, 150000), nnz=1124974, - layout=torch.sparse_csr) -tensor([0.9506, 0.1227, 0.0741, ..., 0.0499, 0.3200, 0.9652]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124974 -Density: 4.999884444444444e-05 -Time: 10.351625680923462 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_8e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_8e-05.json deleted file mode 100644 index d48e29e..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 4281, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799936, "MATRIX_DENSITY": 7.999715555555555e-05, "TIME_S": 10.443866491317749, "TIME_S_1KI": 2.439585725605641, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 704.0738398504258, "W": 67.89, "J_1KI": 164.46480725307774, "W_1KI": 15.85844428871759, "W_D": 33.215250000000005, "J_D": 344.4688261760474, "W_D_1KI": 7.7587596355991595, "J_D_1KI": 1.8123708562483438} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_8e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_8e-05.output deleted file mode 100644 index 7e290dc..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_150000_8e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 30, ..., 1799918, - 1799925, 1799936]), - col_indices=tensor([ 12089, 21062, 25587, ..., 127797, 130427, - 147650]), - values=tensor([-1.6655, 0.7203, 0.8555, ..., 1.2764, 0.0934, - -0.6315]), size=(150000, 150000), nnz=1799936, - layout=torch.sparse_csr) -tensor([0.4848, 0.2793, 0.2056, ..., 0.0127, 0.3003, 0.4574]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799936 -Density: 7.999715555555555e-05 -Time: 10.443866491317749 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_0.0001.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_0.0001.json deleted file mode 100644 index e9d9f66..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2095, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999780, "MATRIX_DENSITY": 9.99945e-05, "TIME_S": 10.490428686141968, "TIME_S_1KI": 5.007364527991393, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 791.5454059553147, "W": 76.53, "J_1KI": 377.8259694297444, "W_1KI": 36.52983293556086, "W_D": 41.87875, "J_D": 433.1495122128725, "W_D_1KI": 19.98985680190931, "J_D_1KI": 9.54169775747461} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_0.0001.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_0.0001.output deleted file mode 100644 index a47ad40..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_0.0001.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 26, 45, ..., 3999736, - 3999762, 3999780]), - col_indices=tensor([ 338, 10465, 27342, ..., 176199, 185309, - 186476]), - values=tensor([-1.6089, -0.0542, -1.2665, ..., 0.0676, 0.4559, - -0.2149]), size=(200000, 200000), nnz=3999780, - layout=torch.sparse_csr) -tensor([0.3457, 0.8304, 0.9183, ..., 0.2549, 0.5990, 0.1911]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999780 -Density: 9.99945e-05 -Time: 10.490428686141968 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_1e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_1e-05.json deleted file mode 100644 index 0c4b342..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6316, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399997, "MATRIX_DENSITY": 9.999925e-06, "TIME_S": 10.397881507873535, "TIME_S_1KI": 1.646276362867881, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.127347688675, "W": 64.76, "J_1KI": 106.57494421923289, "W_1KI": 10.253324889170361, "W_D": 29.513250000000006, "J_D": 306.76614722317464, "W_D_1KI": 4.672775490816973, "J_D_1KI": 0.739831458330743} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_1e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_1e-05.output deleted file mode 100644 index cf32939..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_1e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 399993, 399996, - 399997]), - col_indices=tensor([ 59588, 38991, 116750, ..., 162329, 175526, - 46215]), - values=tensor([-0.9965, 0.1698, 2.4196, ..., -0.0719, -0.4474, - -1.5447]), size=(200000, 200000), nnz=399997, - layout=torch.sparse_csr) -tensor([0.3974, 0.5312, 0.0295, ..., 0.7579, 0.0977, 0.4617]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399997 -Density: 9.999925e-06 -Time: 10.397881507873535 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_2e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_2e-05.json deleted file mode 100644 index 70c071e..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 4692, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799995, "MATRIX_DENSITY": 1.9999875e-05, "TIME_S": 10.473196029663086, "TIME_S_1KI": 2.2321389662538547, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 685.49769947052, "W": 65.68, "J_1KI": 146.09925393659847, "W_1KI": 13.99829497016198, "W_D": 31.06625000000001, "J_D": 324.23634144604216, "W_D_1KI": 6.621110400682014, "J_D_1KI": 1.411148849250216} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_2e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_2e-05.output deleted file mode 100644 index 2c43409..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_2e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 799980, 799988, - 799995]), - col_indices=tensor([ 84851, 60881, 116062, ..., 78517, 126138, - 193669]), - values=tensor([ 0.6880, 0.8714, -1.0635, ..., -0.7129, 0.3128, - -1.2824]), size=(200000, 200000), nnz=799995, - layout=torch.sparse_csr) -tensor([0.5360, 0.2832, 0.4672, ..., 0.4430, 0.9728, 0.0899]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799995 -Density: 1.9999875e-05 -Time: 10.473196029663086 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_5e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_5e-05.json deleted file mode 100644 index d301332..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3277, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999956, "MATRIX_DENSITY": 4.99989e-05, "TIME_S": 10.51095986366272, "TIME_S_1KI": 3.2074946181454744, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 710.4174321079254, "W": 67.94, "J_1KI": 216.78896310891832, "W_1KI": 20.732377174244736, "W_D": 33.265, "J_D": 347.83685426950456, "W_D_1KI": 10.151052792187977, "J_D_1KI": 3.097666399813237} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_5e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_5e-05.output deleted file mode 100644 index 3bbb252..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_5e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 29, ..., 1999937, - 1999947, 1999956]), - col_indices=tensor([ 13156, 36605, 37372, ..., 140377, 155111, - 183705]), - values=tensor([-1.3884, -1.0689, 0.0024, ..., 0.0580, 0.3079, - -0.5076]), size=(200000, 200000), nnz=1999956, - layout=torch.sparse_csr) -tensor([0.7999, 0.7597, 0.9270, ..., 0.0480, 0.9227, 0.6744]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999956 -Density: 4.99989e-05 -Time: 10.51095986366272 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_8e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_8e-05.json deleted file mode 100644 index 5868e02..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2489, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199869, "MATRIX_DENSITY": 7.9996725e-05, "TIME_S": 10.859083414077759, "TIME_S_1KI": 4.362829816825134, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 789.1867278862, "W": 75.08, "J_1KI": 317.06979826685415, "W_1KI": 30.164724789071915, "W_D": 40.222, "J_D": 422.78461066913604, "W_D_1KI": 16.159903575733228, "J_D_1KI": 6.492528555939424} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_8e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_8e-05.output deleted file mode 100644 index 4f9c5e1..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_200000_8e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 33, ..., 3199838, - 3199851, 3199869]), - col_indices=tensor([ 14841, 21942, 32682, ..., 160149, 173929, - 179958]), - values=tensor([-0.3425, 0.4721, 2.0501, ..., 0.4646, -1.9362, - -2.3289]), size=(200000, 200000), nnz=3199869, - layout=torch.sparse_csr) -tensor([0.5919, 0.7537, 0.7084, ..., 0.8331, 0.4028, 0.3348]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199869 -Density: 7.9996725e-05 -Time: 10.859083414077759 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_0.0001.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_0.0001.json deleted file mode 100644 index e415231..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 74789, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39999, "MATRIX_DENSITY": 9.99975e-05, "TIME_S": 10.514662027359009, "TIME_S_1KI": 0.14059102310980237, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 668.7646343803406, "W": 64.42, "J_1KI": 8.942018670932097, "W_1KI": 0.8613566166147428, "W_D": 29.704250000000002, "J_D": 308.369324600935, "W_D_1KI": 0.3971740496597093, "J_D_1KI": 0.0053105944678991475} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_0.0001.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_0.0001.output deleted file mode 100644 index 042a5de..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_0.0001.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 39996, 39997, 39999]), - col_indices=tensor([ 6250, 6566, 5693, ..., 18035, 11217, 16204]), - values=tensor([-0.0798, -0.5556, -0.3731, ..., 0.0478, -0.6990, - 2.0741]), size=(20000, 20000), nnz=39999, - layout=torch.sparse_csr) -tensor([0.1075, 0.9169, 0.7876, ..., 0.5612, 0.8385, 0.3274]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39999 -Density: 9.99975e-05 -Time: 10.514662027359009 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_1e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_1e-05.json deleted file mode 100644 index 96aba50..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 345732, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.12452244758606, "TIME_S_1KI": 0.029284308214414804, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 696.1312260389328, "W": 65.7, "J_1KI": 2.0134995488960605, "W_1KI": 0.19003158515844645, "W_D": 30.93350000000001, "J_D": 327.7591366921664, "W_D_1KI": 0.08947248157532427, "J_D_1KI": 0.0002587914383838472} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_1e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_1e-05.output deleted file mode 100644 index 05e611b..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_1e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 3999, 3999, 4000]), - col_indices=tensor([ 92, 9668, 11990, ..., 4090, 8, 301]), - values=tensor([-0.1730, 1.1446, 0.3740, ..., 0.5940, -1.4466, - -0.6480]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.9925, 0.0746, 0.5129, ..., 0.8252, 0.1954, 0.0976]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.12452244758606 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_2e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_2e-05.json deleted file mode 100644 index 695dfba..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 291432, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.491690874099731, "TIME_S_1KI": 0.036000476523167436, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 779.9160013961791, "W": 65.16, "J_1KI": 2.6761508736040622, "W_1KI": 0.22358560487523677, "W_D": 29.93874999999999, "J_D": 358.3442324554919, "W_D_1KI": 0.10272979631612174, "J_D_1KI": 0.0003525000559860336} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_2e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_2e-05.output deleted file mode 100644 index 39b85f2..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8000, 8000, 8000]), - col_indices=tensor([11397, 19708, 1023, ..., 664, 15608, 13025]), - values=tensor([-0.6025, 0.0031, -0.8026, ..., 1.3202, 0.8655, - -0.7453]), size=(20000, 20000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.2073, 0.4457, 0.4239, ..., 0.4766, 0.0584, 0.3044]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 8000 -Density: 2e-05 -Time: 10.491690874099731 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_5e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_5e-05.json deleted file mode 100644 index c805dbb..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 89951, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.222992897033691, "TIME_S_1KI": 0.11365068645188704, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 645.7357754731179, "W": 64.43, "J_1KI": 7.178750380464007, "W_1KI": 0.7162788629364878, "W_D": 28.18925000000001, "J_D": 282.52067683929215, "W_D_1KI": 0.31338450934397627, "J_D_1KI": 0.003483946919366947} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_5e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_5e-05.output deleted file mode 100644 index e6aa362..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_5e-05.output +++ /dev/null @@ -1,16 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 19998, 19999, 20000]), - col_indices=tensor([ 5649, 8244, 8312, ..., 12695, 6483, 5873]), - values=tensor([0.1482, 1.1268, 1.6953, ..., 0.0526, 0.1507, 0.2372]), - size=(20000, 20000), nnz=20000, layout=torch.sparse_csr) -tensor([0.2916, 0.1894, 0.7486, ..., 0.6986, 0.2995, 0.3962]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 10.222992897033691 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_8e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_8e-05.json deleted file mode 100644 index e2ba9a5..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 78738, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31998, "MATRIX_DENSITY": 7.9995e-05, "TIME_S": 10.495476722717285, "TIME_S_1KI": 0.13329620669457296, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 658.7904186344147, "W": 64.44, "J_1KI": 8.366867568828452, "W_1KI": 0.8184104244456298, "W_D": 29.73324999999999, "J_D": 303.97238073962916, "W_D_1KI": 0.37762262185983886, "J_D_1KI": 0.0047959387063405065} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_8e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_8e-05.output deleted file mode 100644 index fedf9e9..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_20000_8e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 31997, 31998, 31998]), - col_indices=tensor([ 1213, 8141, 9649, ..., 9291, 13235, 12511]), - values=tensor([ 0.0452, -1.1084, 0.1051, ..., 0.6984, -0.2088, - 1.5476]), size=(20000, 20000), nnz=31998, - layout=torch.sparse_csr) -tensor([0.0679, 0.2772, 0.0074, ..., 0.8995, 0.6844, 0.7017]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31998 -Density: 7.9995e-05 -Time: 10.495476722717285 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_0.0001.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_0.0001.json deleted file mode 100644 index 286f338..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 19822, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249990, "MATRIX_DENSITY": 9.9996e-05, "TIME_S": 10.2759370803833, "TIME_S_1KI": 0.5184107093322219, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 674.9763563752174, "W": 65.47, "J_1KI": 34.05187954672674, "W_1KI": 3.3028957723741295, "W_D": 30.65025, "J_D": 315.9950216433406, "W_D_1KI": 1.5462743416406013, "J_D_1KI": 0.07800798817680363} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_0.0001.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_0.0001.output deleted file mode 100644 index ef9bff5..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_0.0001.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 8, ..., 249982, 249984, - 249990]), - col_indices=tensor([ 2676, 5016, 6104, ..., 45237, 49066, 49759]), - values=tensor([-0.1669, -0.1431, 0.5576, ..., 0.0646, -1.0721, - 0.0970]), size=(50000, 50000), nnz=249990, - layout=torch.sparse_csr) -tensor([0.6505, 0.3061, 0.3962, ..., 0.1525, 0.2115, 0.1747]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249990 -Density: 9.9996e-05 -Time: 10.2759370803833 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_1e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_1e-05.json deleted file mode 100644 index 600f332..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 43470, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.26034140586853, "TIME_S_1KI": 0.23603269854770026, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 663.9404110598564, "W": 64.57, "J_1KI": 15.273531425347512, "W_1KI": 1.4853922245226592, "W_D": 29.945499999999996, "J_D": 307.91431902420516, "W_D_1KI": 0.6888773867034735, "J_D_1KI": 0.015847190860443373} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_1e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_1e-05.output deleted file mode 100644 index dc7735a..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_1e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([14004, 26907, 25557, ..., 5331, 36718, 10718]), - values=tensor([-0.6440, 0.1930, -1.8608, ..., -0.8884, -1.2043, - -0.7536]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.9911, 0.7354, 0.4549, ..., 0.9403, 0.3282, 0.4853]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.26034140586853 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_2e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_2e-05.json deleted file mode 100644 index 168f60c..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 31901, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 49999, "MATRIX_DENSITY": 1.99996e-05, "TIME_S": 10.44495439529419, "TIME_S_1KI": 0.32741777358998747, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 658.0571809387208, "W": 64.64, "J_1KI": 20.62810510450208, "W_1KI": 2.0262687690041066, "W_D": 29.81450000000001, "J_D": 303.5217484699489, "W_D_1KI": 0.9345945268173415, "J_D_1KI": 0.02929671567716816} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_2e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_2e-05.output deleted file mode 100644 index f40b925..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 49997, 49999, 49999]), - col_indices=tensor([ 339, 37811, 42112, ..., 41947, 23231, 41819]), - values=tensor([-0.5023, 0.6449, 1.9473, ..., 0.2212, 1.0990, - 0.0764]), size=(50000, 50000), nnz=49999, - layout=torch.sparse_csr) -tensor([0.0140, 0.8746, 0.9519, ..., 0.1795, 0.2485, 0.3704]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 49999 -Density: 1.99996e-05 -Time: 10.44495439529419 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_5e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_5e-05.json deleted file mode 100644 index ed92785..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 23753, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124994, "MATRIX_DENSITY": 4.99976e-05, "TIME_S": 10.234593868255615, "TIME_S_1KI": 0.4308758417149671, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 657.1890437602996, "W": 64.1, "J_1KI": 27.667622774399007, "W_1KI": 2.69860649181156, "W_D": 29.162999999999997, "J_D": 298.9953835129738, "W_D_1KI": 1.227760703911085, "J_D_1KI": 0.0516886584394007} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_5e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_5e-05.output deleted file mode 100644 index df015fb..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_5e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 124992, 124993, - 124994]), - col_indices=tensor([24726, 35130, 40481, ..., 8574, 18917, 36412]), - values=tensor([ 2.0361, -1.8129, 0.3538, ..., 0.0804, -0.0831, - -0.1861]), size=(50000, 50000), nnz=124994, - layout=torch.sparse_csr) -tensor([0.0699, 0.9305, 0.2070, ..., 0.3566, 0.4277, 0.0306]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124994 -Density: 4.99976e-05 -Time: 10.234593868255615 seconds - diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_8e-05.json b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_8e-05.json deleted file mode 100644 index 941283f..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 19635, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199993, "MATRIX_DENSITY": 7.99972e-05, "TIME_S": 10.2986741065979, "TIME_S_1KI": 0.5245059387113777, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 666.2074404859543, "W": 64.86, "J_1KI": 33.92958698680694, "W_1KI": 3.303284950343774, "W_D": 29.582249999999995, "J_D": 303.8531461041569, "W_D_1KI": 1.5066080977845682, "J_D_1KI": 0.07673074091085144} diff --git a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_8e-05.output b/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_8e-05.output deleted file mode 100644 index 79e93df..0000000 --- a/pytorch/output_1core_before_test/epyc_7313p_10_10_10_50000_8e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 199986, 199990, - 199993]), - col_indices=tensor([17669, 23752, 22742, ..., 10907, 19387, 22472]), - values=tensor([-0.7486, -1.6315, -0.5133, ..., -0.9165, -1.3647, - -1.5533]), size=(50000, 50000), nnz=199993, - layout=torch.sparse_csr) -tensor([0.2912, 0.0652, 0.5829, ..., 0.7422, 0.5995, 0.9407]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199993 -Density: 7.99972e-05 -Time: 10.2986741065979 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_0.0001.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_0.0001.json deleted file mode 100644 index 8f8d3bb..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3863, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 999937, "MATRIX_DENSITY": 9.99937e-05, "TIME_S": 10.451575994491577, "TIME_S_1KI": 2.7055594083591967, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 552.2880682086945, "W": 53.02000000000001, "J_1KI": 142.96869485081402, "W_1KI": 13.725084131504014, "W_D": 36.101500000000016, "J_D": 376.05484146428125, "W_D_1KI": 9.345456898783333, "J_D_1KI": 2.4192225987013547} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_0.0001.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_0.0001.output deleted file mode 100644 index d46c4d9..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_0.0001.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 16, ..., 999915, 999924, - 999937]), - col_indices=tensor([ 5854, 25638, 48835, ..., 56929, 66626, 88254]), - values=tensor([-1.6403, 0.2547, -0.8936, ..., 1.7770, -1.3275, - -0.8781]), size=(100000, 100000), nnz=999937, - layout=torch.sparse_csr) -tensor([0.5026, 0.9242, 0.4948, ..., 0.7978, 0.0587, 0.2827]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 999937 -Density: 9.99937e-05 -Time: 10.451575994491577 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_1e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_1e-05.json deleted file mode 100644 index 76fa2b0..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 10396, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 99999, "MATRIX_DENSITY": 9.9999e-06, "TIME_S": 10.391824960708618, "TIME_S_1KI": 0.9995983994525411, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 551.7713853979111, "W": 52.940000000000005, "J_1KI": 53.075354501530505, "W_1KI": 5.09234320892651, "W_D": 35.718250000000005, "J_D": 372.2763182185293, "W_D_1KI": 3.435768564832628, "J_D_1KI": 0.33048947333903694} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_1e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_1e-05.output deleted file mode 100644 index e000b8a..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_1e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 99993, 99997, 99999]), - col_indices=tensor([23234, 45047, 34421, ..., 94375, 10081, 62145]), - values=tensor([-1.0023, -0.2523, -0.7467, ..., 0.2274, 2.3351, - -0.5035]), size=(100000, 100000), nnz=99999, - layout=torch.sparse_csr) -tensor([0.8267, 0.0639, 0.1197, ..., 0.1927, 0.3049, 0.8902]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 99999 -Density: 9.9999e-06 -Time: 10.391824960708618 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_2e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_2e-05.json deleted file mode 100644 index 6d4780b..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 8433, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 199998, "MATRIX_DENSITY": 1.99998e-05, "TIME_S": 10.325804948806763, "TIME_S_1KI": 1.2244521461883981, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 550.1599510192872, "W": 53.20000000000001, "J_1KI": 65.2389364424626, "W_1KI": 6.308549745049213, "W_D": 36.06625000000001, "J_D": 372.9738032603265, "W_D_1KI": 4.276799478240248, "J_D_1KI": 0.5071504183849458} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_2e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_2e-05.output deleted file mode 100644 index 8c22a7d..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_2e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 10, ..., 199997, 199998, - 199998]), - col_indices=tensor([11483, 49150, 54634, ..., 44217, 61397, 15581]), - values=tensor([-0.7612, 0.2917, 1.6644, ..., -1.7074, -0.6969, - 0.1237]), size=(100000, 100000), nnz=199998, - layout=torch.sparse_csr) -tensor([0.0579, 0.1923, 0.4697, ..., 0.4617, 0.1350, 0.8769]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 199998 -Density: 1.99998e-05 -Time: 10.325804948806763 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_5e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_5e-05.json deleted file mode 100644 index 694af0d..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 5929, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 499993, "MATRIX_DENSITY": 4.99993e-05, "TIME_S": 10.397029399871826, "TIME_S_1KI": 1.7535890369154707, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 555.3987412333488, "W": 53.55, "J_1KI": 93.67494370608009, "W_1KI": 9.031877213695395, "W_D": 36.336, "J_D": 376.86215987777706, "W_D_1KI": 6.128520829819531, "J_D_1KI": 1.033651683221375} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_5e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_5e-05.output deleted file mode 100644 index 0c5f856..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_5e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 499980, 499989, - 499993]), - col_indices=tensor([45244, 53584, 88044, ..., 11037, 25829, 72406]), - values=tensor([ 1.1631, 2.6457, 0.2975, ..., 0.2843, -0.4203, - 0.6048]), size=(100000, 100000), nnz=499993, - layout=torch.sparse_csr) -tensor([0.8804, 0.6592, 0.8066, ..., 0.3508, 0.4772, 0.1541]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 499993 -Density: 4.99993e-05 -Time: 10.397029399871826 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_8e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_8e-05.json deleted file mode 100644 index 76303d5..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4805, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 799966, "MATRIX_DENSITY": 7.99966e-05, "TIME_S": 10.459421873092651, "TIME_S_1KI": 2.176778745700864, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 568.125558218956, "W": 53.86000000000001, "J_1KI": 118.2363284534768, "W_1KI": 11.209157127991677, "W_D": 33.93100000000001, "J_D": 357.9106631252767, "W_D_1KI": 7.061602497398545, "J_D_1KI": 1.4696363157957428} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_8e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_8e-05.output deleted file mode 100644 index 6c092ec..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_100000_8e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 15, ..., 799954, 799957, - 799966]), - col_indices=tensor([10789, 16988, 50145, ..., 73665, 79032, 84140]), - values=tensor([ 0.0681, -3.4738, -0.3210, ..., 0.1183, -0.1618, - 0.6751]), size=(100000, 100000), nnz=799966, - layout=torch.sparse_csr) -tensor([0.3383, 0.6398, 0.8946, ..., 0.4367, 0.9060, 0.7604]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 799966 -Density: 7.99966e-05 -Time: 10.459421873092651 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_0.0001.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_0.0001.json deleted file mode 100644 index e330268..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 124271, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 9999, "MATRIX_DENSITY": 9.999e-05, "TIME_S": 10.570436477661133, "TIME_S_1KI": 0.08505955917037067, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 550.5434536242485, "W": 52.17, "J_1KI": 4.430184464792658, "W_1KI": 0.4198083221346895, "W_D": 35.0095, "J_D": 369.4508537408114, "W_D_1KI": 0.281718985121227, "J_D_1KI": 0.0022669728667285773} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_0.0001.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_0.0001.output deleted file mode 100644 index ceb20d5..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_0.0001.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 9993, 9995, 9999]), - col_indices=tensor([2143, 2826, 2135, ..., 5322, 5686, 7701]), - values=tensor([-0.3942, -0.9365, 1.1765, ..., -1.0037, -0.1325, - 0.2244]), size=(10000, 10000), nnz=9999, - layout=torch.sparse_csr) -tensor([0.1055, 0.7003, 0.4545, ..., 0.4197, 0.7666, 0.0347]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 9999 -Density: 9.999e-05 -Time: 10.570436477661133 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_1e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_1e-05.json deleted file mode 100644 index 6e676c5..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 350169, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.529792547225952, "TIME_S_1KI": 0.03007060175865354, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 540.0640004825592, "W": 52.28, "J_1KI": 1.542295293080082, "W_1KI": 0.14929933831949715, "W_D": 35.20075, "J_D": 363.6315582438111, "W_D_1KI": 0.10052503219873832, "J_D_1KI": 0.0002870757611288787} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_1e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_1e-05.output deleted file mode 100644 index d4afdfc..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_1e-05.output +++ /dev/null @@ -1,376 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([8881, 3770, 6723, 8570, 3894, 2446, 1201, 8305, 9311, - 5705, 2799, 5132, 480, 9336, 671, 9450, 7805, 2582, - 5584, 3495, 3605, 1613, 7630, 8303, 141, 7833, 3628, - 3787, 4398, 8770, 4321, 4690, 7961, 837, 1688, 4873, - 744, 8988, 7394, 8667, 516, 1879, 8954, 3346, 7018, - 4952, 2916, 1912, 1943, 3278, 1234, 8169, 280, 971, - 7457, 9488, 8301, 9617, 4484, 6785, 7059, 6480, 9462, - 5585, 5362, 434, 1151, 9603, 7135, 680, 3322, 1091, - 4690, 8899, 5463, 9641, 1460, 1452, 4828, 1665, 7822, - 2348, 5595, 1886, 1379, 3613, 6269, 1342, 2028, 1360, - 3709, 951, 1583, 6586, 7163, 3267, 5806, 6608, 9455, - 882, 1821, 1845, 6754, 5650, 187, 6696, 1579, 7435, - 2967, 9963, 1783, 9204, 4300, 5056, 8754, 1005, 9389, - 3013, 5666, 5579, 6051, 7005, 5567, 9965, 41, 2346, - 3212, 1046, 3278, 4198, 4091, 6791, 9118, 7304, 7374, - 8552, 5788, 2319, 6660, 4009, 3828, 5924, 7888, 948, - 9511, 7785, 8735, 1724, 5753, 7075, 3426, 256, 3227, - 3879, 6485, 9185, 8527, 3554, 185, 6656, 7091, 4829, - 7286, 5855, 6239, 8621, 9138, 7747, 264, 1213, 5364, - 7731, 6644, 9115, 9454, 6578, 8900, 3092, 7644, 9281, - 370, 1197, 2269, 9534, 3397, 2296, 496, 122, 1779, - 3523, 6912, 837, 3868, 734, 5259, 2977, 1663, 9350, - 615, 9585, 2762, 2528, 9179, 1698, 3782, 3155, 3907, - 7206, 1292, 8159, 3856, 3053, 5806, 3543, 6645, 4576, - 2695, 8064, 5143, 897, 2481, 8370, 6100, 7009, 2336, - 7078, 8317, 1637, 2716, 2740, 3361, 9387, 1541, 992, - 2446, 9932, 8790, 3571, 6386, 5592, 4909, 7413, 8006, - 3944, 2203, 4897, 8035, 2774, 6606, 6621, 769, 1908, - 4246, 5829, 4367, 503, 845, 9787, 6332, 8221, 5754, - 302, 9894, 4424, 2380, 8614, 8315, 4038, 5410, 4641, - 325, 3776, 2997, 6406, 1833, 8312, 1725, 4234, 1166, - 7437, 2604, 5583, 9413, 4899, 5865, 8045, 2880, 4059, - 9975, 9999, 6592, 7058, 735, 7821, 4913, 8623, 3346, - 4618, 5895, 2592, 8646, 4566, 9998, 625, 7513, 4709, - 2475, 4328, 7445, 5607, 3099, 2826, 1247, 621, 2106, - 1509, 6827, 3470, 8367, 5307, 5671, 8541, 5939, 5290, - 2347, 6724, 115, 4176, 2645, 1497, 7763, 115, 1790, - 2398, 4826, 2580, 133, 976, 9877, 380, 5756, 5648, - 9067, 6133, 5892, 2186, 1271, 7671, 362, 1744, 2451, - 9146, 104, 1920, 456, 5632, 3040, 6105, 4530, 1873, - 1269, 3226, 4189, 3753, 2865, 5214, 7590, 3633, 5907, - 8095, 6316, 6439, 1615, 2823, 8860, 4557, 9975, 9535, - 8205, 5955, 8880, 6970, 2467, 4203, 8362, 4670, 8209, - 8165, 7783, 2731, 413, 8729, 7540, 116, 5993, 3121, - 6746, 5986, 8549, 1740, 1673, 7833, 9400, 2736, 1686, - 1145, 3419, 6732, 224, 2861, 6407, 5219, 2705, 8474, - 3974, 2354, 5135, 745, 9952, 2153, 9573, 4143, 7056, - 4145, 9091, 7331, 8001, 6686, 7469, 7706, 1784, 6518, - 9228, 6376, 3121, 7855, 4203, 5080, 7140, 1031, 625, - 2488, 252, 1346, 7977, 7843, 384, 8326, 2053, 3690, - 8396, 8906, 9795, 7505, 4330, 9884, 3557, 7712, 4838, - 1177, 3463, 6568, 3773, 5187, 708, 3985, 2877, 635, - 1256, 8621, 7731, 3829, 2651, 7133, 7363, 9422, 2446, - 1706, 9541, 8409, 4411, 306, 4858, 1215, 7052, 3022, - 991, 4212, 72, 6461, 8305, 3463, 4860, 6167, 3622, - 1452, 2213, 8880, 9570, 9128, 2400, 2942, 549, 6937, - 8510, 6704, 7598, 7679, 237, 8912, 3908, 4968, 6752, - 5509, 4050, 1794, 2428, 8953, 8092, 8852, 4157, 9081, - 8603, 1776, 5635, 7324, 5132, 2777, 1386, 2872, 2934, - 4743, 6471, 3906, 5202, 4075, 386, 9101, 8488, 6550, - 2163, 4348, 8839, 9482, 6738, 2946, 537, 8040, 3913, - 5219, 3907, 8245, 108, 2469, 7362, 5858, 6463, 3449, - 
6919, 804, 379, 6993, 8713, 9863, 980, 5127, 8498, - 6090, 9584, 3093, 2278, 8095, 601, 5427, 2163, 3168, - 922, 6789, 3632, 3825, 3752, 1702, 3699, 2564, 390, - 1631, 6383, 5237, 6066, 9833, 7109, 5836, 2614, 4317, - 569, 9179, 7976, 5005, 8990, 6063, 3193, 2990, 3266, - 9653, 4578, 3935, 9977, 5577, 9756, 9976, 4004, 1252, - 6079, 2840, 6655, 5228, 6235, 3066, 4504, 825, 5752, - 6290, 5716, 5443, 3837, 6977, 4702, 464, 2419, 8760, - 2437, 9464, 6914, 3967, 6967, 7997, 9000, 8279, 4943, - 8453, 3772, 5733, 5159, 5465, 5132, 7058, 757, 7808, - 1128, 1387, 1417, 2612, 211, 7724, 8356, 4195, 5001, - 4480, 7919, 8043, 7558, 4356, 4105, 1711, 3558, 5969, - 2933, 3318, 5441, 508, 7938, 6792, 5224, 9356, 9390, - 2902, 6175, 9158, 4453, 939, 6035, 7742, 5501, 9346, - 6644, 8044, 2930, 8354, 1337, 7863, 9907, 9021, 6022, - 631, 6742, 1475, 1372, 2639, 3479, 5517, 8249, 9540, - 1598, 8717, 3992, 7663, 145, 475, 1673, 7162, 5628, - 4571, 6246, 6287, 9564, 7309, 3255, 5500, 5693, 4912, - 8274, 7368, 6291, 3720, 3438, 3296, 8958, 6023, 6654, - 1627, 5939, 6122, 5374, 8464, 5798, 526, 3830, 1825, - 625, 2454, 2101, 5133, 4517, 594, 8804, 5329, 4572, - 9201, 2822, 6970, 8346, 6102, 1124, 424, 7814, 1560, - 1405, 6621, 3486, 4247, 8766, 7423, 7126, 6944, 767, - 9761, 2363, 8023, 5745, 8791, 3019, 3108, 2992, 6578, - 9914, 7882, 9654, 6967, 6190, 893, 6524, 8157, 4459, - 1382, 7973, 2356, 4776, 3125, 2064, 1463, 4260, 5105, - 4743, 6306, 8616, 92, 4734, 6436, 5427, 5207, 1900, - 6704, 2599, 125, 1282, 6170, 5984, 5762, 3685, 6811, - 174, 4284, 8202, 9604, 9775, 2507, 5540, 6269, 5834, - 1692, 5376, 226, 1002, 3206, 7113, 6556, 7724, 3256, - 8570, 2750, 9391, 553, 6012, 7131, 8153, 6473, 3802, - 6290, 5904, 9034, 9385, 4128, 8797, 8098, 4011, 4296, - 4805, 6671, 464, 1074, 4111, 1592, 8618, 7921, 9623, - 5334, 5956, 8636, 2483, 8599, 6678, 6142, 2746, 6850, - 7060, 2053, 3261, 9466, 1194, 1243, 3065, 3976, 420, - 5716, 31, 2078, 9782, 9691, 9701, 9139, 7069, 8866, - 731, 9595, 7481, 1836, 1819, 7079, 4898, 7283, 4335, - 2280, 7145, 9113, 2063, 5174, 6561, 7808, 3417, 5418, - 7965, 1135, 3969, 543, 6386, 1116, 1088, 7110, 6812, - 6191, 1838, 6725, 4036, 4277, 5202, 1921, 9508, 4182, - 6521, 1875, 4219, 6986, 7114, 2562, 2398, 1294, 7851, - 9639, 6662, 5198, 4893, 4597, 4569, 6735, 324, 3818, - 9424, 2963, 5114, 3885, 9653, 9770, 9178, 7594, 6751, - 4091, 3065, 1693, 1763, 4754, 1243, 8465, 7441, 8389, - 4284, 9315, 2842, 6627, 7946, 2298, 3246, 3028, 7745, - 665, 2333, 9075, 1067, 5475, 4783, 4550, 4888, 43, - 7728, 3973, 5253, 1805, 6784, 8885, 1179, 1332, 1625, - 2677, 1400, 7935, 19, 9156, 3536, 6831, 2397, 7139, - 927, 220, 3029, 3541, 8879, 9344, 6231, 9685, 3698, - 2492]), - values=tensor([-1.8192e+00, 1.7179e-01, -1.6688e+00, -4.6865e-01, - -8.6396e-01, 2.6762e-01, 2.1614e+00, 4.1427e-01, - -9.4250e-01, -1.5365e+00, -1.4113e+00, 3.5706e-01, - 1.6944e-01, -5.5163e-01, -1.4528e+00, -7.7452e-01, - 4.3781e-01, -8.4842e-01, 1.7591e+00, -4.8012e-01, - -1.4445e-01, -1.2547e+00, 4.6513e-01, 2.3046e-01, - 9.5788e-01, 9.8459e-02, 7.8298e-01, -7.8188e-02, - 1.2505e+00, 4.0737e-01, 3.3132e+00, 1.5869e-01, - 1.4108e+00, -8.3864e-01, -5.0105e-01, 1.0426e+00, - -3.1939e-01, -6.0248e-01, -3.4007e-01, 6.7035e-01, - -2.2732e-01, 7.3171e-01, -2.4602e+00, 6.1773e-01, - 8.6977e-01, -3.9540e-02, 6.4378e-01, -5.6020e-01, - -4.2936e-01, 1.5354e+00, 1.0445e+00, 2.5619e-01, - -8.1661e-02, 6.5883e-03, -1.1178e+00, -5.1561e-01, - 1.2207e+00, -7.7142e-02, -1.2947e+00, 8.7911e-01, - 7.9214e-01, 4.4838e-01, 9.3153e-01, 1.2218e+00, 
- 1.9365e+00, -3.0891e-01, -9.4335e-01, -1.1768e+00, - 2.1614e-01, 2.0049e+00, 6.9305e-01, -2.6934e-01, - 1.2646e+00, 1.0439e-01, -2.1381e-01, -3.8391e-01, - -1.2382e+00, 8.7580e-01, 3.6034e-02, 6.3099e-03, - -1.6970e+00, 3.8949e-01, 5.9931e-01, 2.6156e-01, - 9.6812e-01, -1.6003e-01, -7.1709e-01, 4.5685e-01, - 2.4145e+00, 2.0183e+00, -5.3880e-01, 1.3848e+00, - -6.1037e-01, -4.2168e-01, 7.3635e-01, 1.0137e+00, - 6.9075e-01, -3.3648e-02, 2.4100e-01, -7.8331e-01, - 4.3581e-01, 4.1269e-01, 1.5342e+00, -2.0438e+00, - -4.9568e-02, 1.6664e+00, 9.0798e-01, -6.4877e-01, - -8.9573e-01, -3.6047e-01, 1.7959e+00, 1.7761e+00, - -1.3967e-01, 5.5465e-01, 1.5308e+00, -2.9891e+00, - -6.5992e-02, 9.0158e-01, 2.4134e-01, 1.6944e-01, - 7.6599e-01, -1.6741e+00, 1.4113e+00, -4.5442e-01, - 2.2133e-01, -4.2905e-01, -1.1090e+00, 1.8690e+00, - 1.6235e+00, 6.9318e-01, 5.1791e-01, -4.0734e-01, - -7.0534e-01, 8.1519e-01, 9.4605e-01, -8.3334e-01, - 1.4761e+00, -1.3698e+00, -5.0389e-01, 2.2240e+00, - -8.9049e-01, 1.0419e+00, -5.6453e-01, 1.0939e+00, - -2.1677e-01, -6.6758e-01, 1.6507e+00, -2.7041e+00, - -1.0205e+00, 3.8320e-01, -2.0924e+00, -3.5273e-01, - 1.3996e+00, 6.8613e-01, 1.4750e+00, 2.0195e-01, - 1.0868e+00, -1.1798e+00, 4.8367e-01, 1.9090e+00, - 1.5756e+00, -1.8854e-01, 1.6431e+00, 5.1664e-01, - -6.8316e-02, -4.5856e-01, 1.1501e-01, -1.0780e+00, - -3.6932e-01, 1.7504e-01, 1.0448e+00, -1.4507e+00, - 8.7674e-01, 2.0473e-01, 1.7419e-01, 8.7454e-01, - 9.4268e-01, 3.5436e-01, 4.7819e-01, -5.8479e-02, - 4.3305e-01, -4.0100e-01, 4.7147e-01, -1.0618e+00, - 8.7154e-01, -6.5976e-01, -4.0778e-01, -1.4641e+00, - -1.2325e+00, 4.2783e-01, 2.0840e-02, -1.5895e+00, - -4.2066e-01, -8.4133e-01, 1.7989e+00, 9.6708e-01, - -1.9325e-01, -7.7866e-01, -1.1506e+00, 1.4607e+00, - 2.7338e-02, -9.3039e-01, 1.0010e+00, 2.6759e+00, - 1.1041e+00, 8.2279e-01, 5.1380e-01, -1.3378e+00, - -4.5932e-01, -7.2315e-01, -7.5809e-01, -1.0896e+00, - 1.0757e-01, 8.6793e-01, 2.2431e-01, 6.3395e-01, - 4.9149e-01, -3.4970e-01, -1.0570e-01, 1.9580e+00, - -6.9116e-02, 6.3299e-01, -9.4924e-02, 9.9015e-02, - -2.1083e-01, -8.1593e-02, -1.0833e+00, 2.5612e-02, - 4.3657e-01, 2.5680e-01, 1.2043e+00, 1.7510e+00, - -9.1644e-02, -3.7973e-01, 9.1598e-01, 6.2536e-01, - 9.3076e-01, -9.0193e-01, 1.4177e+00, 1.6362e+00, - -8.0815e-01, 1.3135e-01, -3.3239e-02, -1.9994e+00, - 2.7828e-01, -1.7743e+00, -2.0146e-01, -2.0787e+00, - 1.3391e-02, 1.7380e+00, 3.4977e-01, -4.6683e-01, - -9.7511e-01, -3.1032e-01, 3.3885e-01, -2.9372e-01, - -1.3336e+00, -2.5860e-01, -1.6907e+00, -1.0318e+00, - 8.9213e-01, 2.1597e+00, 4.7569e-01, -1.4048e-01, - -4.3132e-01, 4.9633e-01, -6.0592e-01, 7.1351e-01, - -2.5562e-01, 2.4004e-02, -5.6510e-01, 1.6429e-01, - -1.3149e+00, 1.1242e+00, 1.4007e-01, -6.3893e-01, - -9.7116e-01, 1.8472e-01, 9.9023e-02, -1.8536e+00, - -1.3342e+00, -2.3583e-01, -1.1926e+00, 8.0409e-01, - -7.5076e-01, 3.6587e-01, -5.8678e-01, 4.9796e-01, - -4.3956e-01, 5.6572e-01, -6.1539e-01, 7.1926e-01, - -6.6055e-01, 2.9456e-01, -1.1753e+00, 4.1510e-01, - -2.5053e-01, 3.5051e-02, 1.9216e-02, -5.7703e-01, - -9.1319e-01, 7.3071e-01, 1.1828e+00, 9.1432e-01, - -5.6412e-01, 1.4990e+00, 7.0031e-02, 1.2785e+00, - -9.5742e-02, -9.8553e-01, 3.6405e-01, 2.2359e-01, - 1.2354e+00, -7.0849e-01, -3.0134e-01, -6.2627e-01, - -1.8219e+00, -1.4908e+00, 1.1885e+00, -9.7936e-01, - -4.6604e-02, 2.8652e-01, 4.9052e-01, -1.5186e+00, - -1.6923e+00, 5.8480e-01, 1.9160e+00, -4.9290e-01, - 1.0009e+00, -1.2347e+00, -1.4449e+00, 1.8535e+00, - -1.1859e+00, -1.3123e+00, 1.4239e+00, 1.3009e+00, - 1.2669e-01, -4.2959e-01, 
3.4777e-01, 4.7329e-01, - -6.0546e-02, -3.2768e-01, 8.3842e-01, -5.3204e-01, - 9.5733e-01, 1.2733e-01, -3.0970e-02, 3.2467e-01, - -1.9662e-01, -9.6093e-01, 8.9099e-02, -1.0089e+00, - -4.8586e-01, 6.1335e-02, 1.0520e+00, 9.3374e-01, - -1.1619e+00, 1.4633e+00, 1.4563e+00, 5.3267e-01, - 2.2023e-01, -5.6705e-01, 3.5953e-01, -3.4284e-01, - 2.4990e-01, 6.8749e-01, -1.3073e-02, -7.2648e-01, - 9.8312e-01, -1.2776e+00, -4.9545e-01, 9.1382e-01, - 1.6207e-01, 1.0117e+00, -1.9005e-01, -7.4040e-01, - -3.9182e-01, -3.7770e-02, 1.3936e+00, 1.5464e+00, - 3.1973e-02, 6.5826e-01, -5.1662e-01, 9.4213e-03, - -2.2751e+00, -2.6060e-02, -2.9272e-02, 2.7364e-01, - -1.0093e+00, -2.4557e+00, -1.6161e-02, 7.1315e-01, - 6.8324e-01, -1.2294e+00, -8.1808e-01, 4.8449e-01, - 1.3290e+00, 3.8479e-01, 1.2599e+00, 6.6009e-02, - -9.7098e-01, -9.0301e-02, -1.4591e+00, -2.3537e-01, - -1.7684e-01, -6.0158e-01, -2.6266e-01, 1.1068e+00, - -1.7911e-01, -8.2442e-01, 3.1314e-01, 1.9698e+00, - -4.4764e-01, -9.7747e-01, -9.6835e-02, 9.5620e-01, - 2.2745e+00, -2.4435e+00, 6.2701e-02, 1.5441e-01, - 8.9950e-01, 1.4643e+00, -1.1420e+00, -3.6487e-01, - -6.4646e-01, -1.0881e+00, -5.0089e-01, -1.0231e+00, - 6.8371e-01, -3.6137e-01, 2.2163e-02, -9.2700e-01, - 1.0393e+00, -1.0715e+00, -4.5928e-01, 1.9363e-02, - 9.6457e-01, -6.0185e-01, 3.8514e-01, 4.7767e-01, - 1.6218e+00, 7.1395e-01, -4.1333e-01, 1.9394e+00, - 4.1466e-01, -1.1971e+00, -1.5762e+00, 1.8651e+00, - 2.3870e+00, -3.6095e-01, 2.0154e+00, -2.1710e+00, - -5.7418e-01, -7.2310e-01, -2.3932e+00, -3.1524e-01, - 2.5613e+00, 2.0968e+00, 8.6258e-01, 2.1356e-01, - -5.2228e-01, -4.9497e-01, -6.5445e-01, -1.5901e+00, - 9.0171e-01, 8.1199e-02, 3.4296e-01, -9.8790e-01, - -1.2921e+00, 1.0129e+00, -2.6747e-01, -1.6503e+00, - 2.2356e-01, -8.6587e-01, 9.5970e-01, -4.2501e-01, - -8.5020e-01, 5.9750e-01, 8.4765e-01, 1.3758e+00, - 8.6392e-01, -3.5739e-01, -2.0402e-01, 1.3152e+00, - 1.1395e+00, -4.0612e-01, -4.6573e-01, 5.2894e-01, - -1.8059e+00, 3.5566e-01, 2.9843e-01, -2.5628e-01, - 4.8110e-01, 2.9712e-01, -2.3123e+00, -8.0438e-01, - 3.4344e-01, -1.7864e+00, 9.6173e-01, 1.2932e-01, - 6.1017e-01, 1.7240e+00, -1.1101e+00, 3.1163e-01, - 5.5188e-01, 8.1368e-01, 1.1917e+00, 1.3892e-01, - 8.6921e-01, 9.7215e-01, -5.0268e-01, -1.6259e-01, - -6.8797e-01, 1.7426e+00, 1.1131e-01, 1.0952e+00, - -7.9808e-01, -3.0173e-01, -5.9934e-01, 8.5048e-02, - 1.0261e+00, -4.8662e-01, -7.2418e-01, -1.7976e-01, - 5.7413e-01, -1.4889e+00, 6.5826e-01, -2.7116e-01, - 1.6744e+00, -5.2702e-01, 1.1387e+00, 1.1999e+00, - -2.8947e-01, -5.4761e-01, -1.0748e+00, -1.9498e-02, - -1.7562e+00, 1.7983e+00, -5.1504e-01, 9.0818e-01, - 2.8158e-01, -3.9005e-01, -4.8375e-01, 3.0729e-01, - 8.8624e-01, 5.3810e-02, 4.8169e-01, -7.2521e-01, - -6.0497e-01, -3.1331e-01, 3.1297e-01, -3.2995e-01, - -6.9801e-01, -2.8011e-01, -3.8277e-01, 3.6351e-01, - 4.6971e-01, -6.7723e-01, 1.2957e+00, -5.1396e-01, - -5.5117e-01, 9.3084e-01, 4.4375e-01, 2.2771e-01, - -1.0934e+00, 8.2355e-01, 1.0758e+00, 7.8175e-02, - 9.2166e-01, -1.7884e-01, -5.9036e-01, 2.0255e+00, - 5.5920e-01, -3.4720e-01, -6.2264e-01, 5.8312e-02, - -1.5865e+00, 6.5014e-01, 1.3558e+00, -1.1489e+00, - -2.9565e-01, -2.6684e-01, 1.8455e-02, 4.0059e-01, - -1.3326e+00, -2.6043e-01, 1.6349e-01, 1.2646e+00, - -1.3829e-01, -2.1357e+00, -1.0213e+00, 2.9699e+00, - -6.5566e-01, 1.0915e+00, -3.7341e-01, -6.5642e-01, - -9.2507e-02, 1.1308e+00, -5.5853e-01, 9.0838e-01, - 5.5454e-01, -9.5281e-01, -1.3089e+00, 3.7153e-01, - 1.1626e+00, 5.4792e-01, -1.0554e+00, 8.2933e-02, - 1.9410e-01, -1.4169e-01, 1.8149e+00, 
3.2653e-01, - 3.5080e-01, 1.1214e+00, -4.1762e-01, 1.2990e+00, - 1.2820e+00, 1.1709e+00, 6.8982e-01, -3.5863e-01, - -6.3697e-01, 4.8695e-01, 1.5961e+00, -8.3991e-01, - -1.4319e+00, 4.7277e-01, 2.2509e-01, -3.9248e-01, - -7.6130e-01, -7.5857e-01, 3.7614e-01, 2.2258e-01, - -1.2563e+00, 5.0437e-01, -3.1614e-01, -6.4835e-01, - 2.2665e+00, -1.8295e+00, 8.6642e-01, -3.5748e-01, - -7.0224e-02, 6.2965e-01, -2.4626e-01, 2.8088e-01, - 5.0547e-01, -7.9341e-01, -4.2194e-01, -2.1408e+00, - -1.8894e+00, -1.5132e+00, 1.8123e-01, 1.3945e+00, - -2.6800e-01, -1.8625e-01, 8.2600e-01, 4.8068e-01, - 1.3136e-01, 6.9110e-01, -4.6681e-01, -2.2447e+00, - 2.0742e+00, 7.1582e-01, -1.0807e+00, -1.4060e-01, - -7.1276e-01, -1.3768e+00, -6.5210e-01, -6.5327e-01, - -2.7030e-01, -2.9710e-01, 2.7598e-01, 6.3636e-01, - 9.6781e-01, -5.9469e-01, 5.3368e-01, -9.6360e-01, - 6.2484e-01, -1.1129e-01, -1.0094e+00, -1.4255e+00, - -2.0386e-01, 2.1415e-01, -9.3259e-02, -3.6696e-01, - 1.5604e+00, -6.0420e-01, 5.5591e-02, 1.1165e+00, - 9.6481e-03, 4.7949e-01, 1.7253e+00, 1.2964e-01, - 4.0638e-01, -2.4963e-01, -9.6640e-01, 2.8459e-03, - -1.0295e+00, 3.3857e+00, 1.0982e+00, 4.4677e-01, - -2.3383e-01, -2.3264e+00, 6.9628e-01, 5.6506e-01, - 1.6191e-01, -1.3639e+00, -2.7812e-01, -8.0977e-01, - 1.3369e+00, 2.3332e-01, -1.7063e+00, -1.2610e+00, - 9.0434e-02, 6.6513e-01, -9.9940e-01, -7.2881e-01, - 1.7633e+00, -4.9065e-01, 4.9921e-01, 6.8279e-01, - -2.7990e+00, -1.5987e+00, -9.3542e-01, 1.7732e+00, - -1.5062e+00, 1.0165e+00, -7.3539e-01, 1.9606e-01, - -1.0949e+00, 4.6431e-01, -1.5526e-01, -1.1898e+00, - 1.7142e-01, 6.2000e-01, -1.9745e-01, -1.6670e+00, - 2.9628e-01, -2.4014e-01, 8.8113e-02, 2.3942e-01, - 3.6614e-01, -7.6074e-01, -6.7773e-01, -1.9141e+00, - 3.6051e-02, -1.6109e-01, 1.5579e+00, -9.3419e-01, - 4.5476e-01, 9.7602e-01, -3.7607e-01, 3.1578e+00, - 8.6702e-01, -2.1222e-01, -3.0129e-01, 1.1134e+00, - -1.3552e+00, -1.0326e+00, 1.8897e-01, 1.3837e+00, - -2.0805e+00, 2.2807e-01, 3.7128e-01, 2.3474e-01, - 7.3218e-01, 4.9265e-01, -5.6289e-01, -4.6572e-01, - 1.5460e+00, 1.1781e+00, 4.3069e-01, 2.0519e-01, - 5.4322e-01, -3.9543e-01, -1.4697e+00, 1.4359e+00, - -1.2352e+00, 1.2267e+00, -6.1644e-01, -5.2100e-01, - -5.0487e-01, -7.4363e-01, -1.9089e+00, -1.9210e-01, - -1.1816e+00, -7.6019e-01, -1.6526e+00, -8.5826e-01, - -3.1771e-01, 1.9921e+00, 1.6610e+00, 1.2075e+00, - -3.9492e-02, -2.2031e-01, 3.5366e-01, 1.1621e+00, - -2.2938e-01, -6.2729e-02, 8.0927e-01, 2.6465e-01, - 6.6559e-02, 1.5877e+00, -2.3910e+00, 1.4463e+00, - -9.5618e-02, 2.2262e-01, 1.8923e-01, 9.7087e-01, - 1.6066e+00, 1.0419e-01, -1.0594e+00, -5.6464e-01, - -2.7390e-01, -3.3479e-01, -1.4155e-01, 8.4380e-01, - -1.6601e+00, -7.6686e-01, -7.9874e-01, -2.7066e+00, - -1.6652e+00, -1.9044e-01, 4.1308e-01, 2.2224e-01, - 8.8893e-01, 6.1358e-02, 5.8049e-01, 1.4204e+00, - 9.8221e-01, -6.0553e-01, -2.9161e-02, -8.0143e-01, - -1.2393e+00, -1.0876e+00, 1.5592e+00, -4.4824e-01, - -4.2776e-01, -1.5620e+00, 1.0854e+00, -9.3333e-01, - 3.8629e-01, 6.9257e-01, 2.6971e+00, -1.8341e-01, - -1.8102e+00, 9.7258e-02, 4.8324e-01, -4.2855e-01, - -7.6725e-01, 2.5860e-01, -1.2001e+00, -1.6496e-01, - 8.2910e-02, -6.7764e-01, 4.0457e-01, -5.4328e-02, - -1.1770e+00, -1.8088e+00, -3.8623e-01, -6.3639e-02, - 6.7500e-01, -4.0664e-01, 3.8255e-01, 1.9943e+00, - 4.5731e-01, 6.7740e-01, -9.3059e-01, 6.6940e-01, - -8.7656e-01, -8.4154e-01, -3.2926e-01, -2.6666e-01, - 1.0328e-01, -1.0809e+00, -1.7362e+00, -7.2848e-01, - -1.3816e+00, 1.0433e+00, -1.5373e+00, -2.6714e-02, - 5.1339e-01, 2.5450e-02, 1.1070e+00, -2.8705e-01, 
- 1.4356e+00, -1.0171e+00, -3.2209e-01, 1.1876e+00, - -3.9399e-01, -5.2678e-01, -1.9038e+00, -4.1074e-01, - 4.6064e-01, -1.0356e+00, -1.6825e+00, 3.2682e-01, - -3.0588e-01, 3.4738e-01, -7.3840e-01, -1.7962e+00, - 1.4722e+00, 2.0192e+00, 2.5433e+00, 4.0779e-01, - 1.2947e+00, -2.3723e+00, 2.7552e-01, 1.8335e-01, - -2.2391e-01, -6.0661e-01, 6.3264e-01, -1.4974e+00, - 9.1415e-01, 1.3918e+00, -3.9506e-01, -1.2315e+00, - 1.0559e+00, -1.1001e+00, -2.5174e-01, 7.7580e-01, - -1.9896e-01, 6.7725e-01, -1.2727e-01, 8.8792e-01, - 9.0833e-01, 1.0871e+00, 2.3733e+00, -6.2405e-01, - 1.6388e-01, 9.2070e-01, 2.1208e+00, 1.0274e+00, - 7.4366e-01, 1.1230e+00, 1.4505e+00, 8.9374e-01, - -2.0233e+00, -1.0817e+00, 1.4180e-01, 6.0655e-01, - 5.3708e-01, -7.4563e-01, 1.3176e+00, -3.6409e-01, - -1.7794e+00, 2.1959e-01, 1.9171e-02, -8.1156e-01, - -1.4609e+00, 9.7246e-01, -3.4861e-01, 7.9824e-01, - 7.4586e-01, -8.3595e-01, -6.8254e-01, 1.8095e+00, - 1.1254e+00, 5.4535e-01, 2.6560e-01, -5.0144e-01, - -5.2311e-01, -1.9004e-01, 1.3269e+00, -1.8799e-01, - 1.3368e+00, -1.0868e+00, -1.4989e+00, -2.1602e+00, - -1.7841e+00, -1.6179e+00, 1.1835e+00, 8.4064e-01, - 1.0906e-01, -1.0904e-03, -2.8054e-01, -4.6094e-01, - 8.1749e-01, 4.8914e-01, -2.6690e+00, 6.1372e-01, - 4.2035e-01, 6.6885e-01, 1.1730e+00, -1.2174e+00, - 2.9131e+00, 6.6929e-01, 4.8679e-01, -9.3831e-01, - -9.6456e-01, -1.3206e+00, 5.9787e-01, -1.2949e-01, - 8.9920e-01, 3.0232e-01, -4.6847e-02, 1.4643e-01, - 1.6119e-01, -3.0036e-02, -6.8822e-01, 1.2814e+00]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8096, 0.0712, 0.0691, ..., 0.0113, 0.8979, 0.6719]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.529792547225952 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_2e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_2e-05.json deleted file mode 100644 index fe9e662..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 280057, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 2000, "MATRIX_DENSITY": 2e-05, "TIME_S": 10.53843879699707, "TIME_S_1KI": 0.03762962110212232, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 540.5401263332367, "W": 52.12, "J_1KI": 1.9301075364416411, "W_1KI": 0.18610497148794708, "W_D": 35.16199999999999, "J_D": 364.66753496026985, "W_D_1KI": 0.1255530124224711, "J_D_1KI": 0.00044831235220855434} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_2e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_2e-05.output deleted file mode 100644 index 2d17cc2..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2000, 2000, 2000]), - col_indices=tensor([6068, 2930, 8831, ..., 9074, 323, 8671]), - values=tensor([ 0.1056, 1.6158, 1.3399, ..., -1.5710, -1.3045, - -1.0064]), size=(10000, 10000), nnz=2000, - layout=torch.sparse_csr) -tensor([0.8225, 0.2742, 0.9912, ..., 0.7272, 0.1523, 0.4272]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 2000 -Density: 2e-05 -Time: 10.53843879699707 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_5e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_5e-05.json deleted file mode 100644 index c76e8b2..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 160651, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.460874557495117, "TIME_S_1KI": 0.06511552718311817, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 538.2348584890366, "W": 52.150000000000006, "J_1KI": 3.3503361852029343, "W_1KI": 0.3246167157378417, "W_D": 35.09600000000001, "J_D": 362.22225490951547, "W_D_1KI": 0.21846113625187524, "J_D_1KI": 0.001359849215080362} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_5e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_5e-05.output deleted file mode 100644 index 4339aaa..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_5e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 4998, 4999, 5000]), - col_indices=tensor([2618, 5016, 7029, ..., 3093, 3769, 7809]), - values=tensor([-0.5312, 1.0956, 1.9238, ..., -1.6101, 0.6692, - -1.5294]), size=(10000, 10000), nnz=5000, - layout=torch.sparse_csr) -tensor([0.5487, 0.0525, 0.7028, ..., 0.6050, 0.3883, 0.7962]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000 -Density: 5e-05 -Time: 10.460874557495117 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_8e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_8e-05.json deleted file mode 100644 index 37b0c0b..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 127780, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 8000, "MATRIX_DENSITY": 8e-05, "TIME_S": 10.27719521522522, "TIME_S_1KI": 0.08042882466133369, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 543.231458580494, "W": 52.150000000000006, "J_1KI": 4.251302696670011, "W_1KI": 0.4081233369854438, "W_D": 34.71625000000001, "J_D": 361.62912989348183, "W_D_1KI": 0.2716876663014557, "J_D_1KI": 0.002126214323849238} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_8e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_8e-05.output deleted file mode 100644 index 3b4b23e..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_10000_8e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7997, 7999, 8000]), - col_indices=tensor([6622, 670, 3821, ..., 5774, 5868, 3996]), - values=tensor([-0.1822, -2.5278, -0.0841, ..., -0.9141, 0.5824, - 0.4698]), size=(10000, 10000), nnz=8000, - layout=torch.sparse_csr) -tensor([0.9919, 0.2427, 0.4295, ..., 0.7353, 0.7669, 0.1786]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 8000 -Density: 8e-05 -Time: 10.27719521522522 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_0.0001.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_0.0001.json deleted file mode 100644 index 6b1a340..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1954, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 2249894, "MATRIX_DENSITY": 9.999528888888889e-05, "TIME_S": 10.577555656433105, "TIME_S_1KI": 5.41328334515512, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 578.4567875862122, "W": 54.0, "J_1KI": 296.0372505558916, "W_1KI": 27.635619242579324, "W_D": 36.891999999999996, "J_D": 395.1931075487136, "W_D_1KI": 18.880245649948822, "J_D_1KI": 9.662357036821302} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_0.0001.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_0.0001.output deleted file mode 100644 index 8180928..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_0.0001.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 25, ..., 2249868, - 2249883, 2249894]), - col_indices=tensor([ 12376, 31951, 37551, ..., 130942, 138263, - 149748]), - values=tensor([-0.7754, -1.2502, 1.2097, ..., -0.3923, -1.3405, - -0.6699]), size=(150000, 150000), nnz=2249894, - layout=torch.sparse_csr) -tensor([0.6034, 0.2387, 0.8858, ..., 0.8532, 0.9902, 0.9325]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 2249894 -Density: 9.999528888888889e-05 -Time: 10.577555656433105 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_1e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_1e-05.json deleted file mode 100644 index a7dc4b9..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 6408, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 224999, "MATRIX_DENSITY": 9.999955555555555e-06, "TIME_S": 10.407044172286987, "TIME_S_1KI": 1.6240705637151978, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 553.3263424086571, "W": 53.089999999999996, "J_1KI": 86.34930437088906, "W_1KI": 8.284956304619225, "W_D": 35.653499999999994, "J_D": 371.59579485905164, "W_D_1KI": 5.5639044943820215, "J_D_1KI": 0.8682747338299035} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_1e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_1e-05.output deleted file mode 100644 index 7077a47..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_1e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 224993, 224996, - 224999]), - col_indices=tensor([ 74850, 39094, 146152, ..., 41609, 99963, - 125158]), - values=tensor([ 1.0182, -0.1434, 0.3829, ..., 0.2855, -1.3283, - -0.6398]), size=(150000, 150000), nnz=224999, - layout=torch.sparse_csr) -tensor([0.6530, 0.2357, 0.9419, ..., 0.9448, 0.4899, 0.9528]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 224999 -Density: 9.999955555555555e-06 -Time: 10.407044172286987 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_2e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_2e-05.json deleted file mode 100644 index 20aa929..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4604, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 449995, "MATRIX_DENSITY": 1.9999777777777777e-05, "TIME_S": 10.460046529769897, "TIME_S_1KI": 2.271947552078605, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 558.5272074079513, "W": 53.43, "J_1KI": 121.31346815985042, "W_1KI": 11.605125977410946, "W_D": 36.44175, "J_D": 380.941584513545, "W_D_1KI": 7.915236750651608, "J_D_1KI": 1.7192086773787159} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_2e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_2e-05.output deleted file mode 100644 index f6ecc63..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_2e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 11, ..., 449989, 449993, - 449995]), - col_indices=tensor([ 13961, 111225, 121681, ..., 134762, 92697, - 143241]), - values=tensor([-0.6068, -0.6282, 1.2338, ..., 0.6739, -1.3009, - 0.7767]), size=(150000, 150000), nnz=449995, - layout=torch.sparse_csr) -tensor([0.4093, 0.8662, 0.0271, ..., 0.8036, 0.8782, 0.7840]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 449995 -Density: 1.9999777777777777e-05 -Time: 10.460046529769897 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_5e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_5e-05.json deleted file mode 100644 index 1404736..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3309, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1124971, "MATRIX_DENSITY": 4.999871111111111e-05, "TIME_S": 10.420583724975586, "TIME_S_1KI": 3.149164014800721, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 564.6881503105163, "W": 54.199999999999996, "J_1KI": 170.65220619840323, "W_1KI": 16.37957086733152, "W_D": 36.70025, "J_D": 382.3652451740503, "W_D_1KI": 11.091039588999697, "J_D_1KI": 3.3517798697490777} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_5e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_5e-05.output deleted file mode 100644 index fae403f..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_5e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 15, ..., 1124952, - 1124965, 1124971]), - col_indices=tensor([ 486, 31110, 48720, ..., 104724, 114560, - 122715]), - values=tensor([-0.0814, -1.0227, 0.0824, ..., -0.7247, 0.3624, - 1.2281]), size=(150000, 150000), nnz=1124971, - layout=torch.sparse_csr) -tensor([0.4716, 0.4879, 0.9069, ..., 0.1650, 0.2158, 0.0027]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1124971 -Density: 4.999871111111111e-05 -Time: 10.420583724975586 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_8e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_8e-05.json deleted file mode 100644 index a850cac..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2262, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [150000, 150000], "MATRIX_ROWS": 150000, "MATRIX_SIZE": 22500000000, "MATRIX_NNZ": 1799954, "MATRIX_DENSITY": 7.999795555555555e-05, "TIME_S": 10.491406202316284, "TIME_S_1KI": 4.638110611103574, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 562.1841441822052, "W": 53.81999999999999, "J_1KI": 248.53410441299965, "W_1KI": 23.79310344827586, "W_D": 36.86899999999999, "J_D": 385.12016372823706, "W_D_1KI": 16.299292661361626, "J_D_1KI": 7.205699673457836} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_8e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_8e-05.output deleted file mode 100644 index 3860e6b..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_150000_8e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 24, ..., 1799924, - 1799939, 1799954]), - col_indices=tensor([ 14610, 19136, 29214, ..., 136382, 146324, - 149409]), - values=tensor([-0.2315, 3.0364, 0.0444, ..., -0.0371, 0.0904, - -0.1241]), size=(150000, 150000), nnz=1799954, - layout=torch.sparse_csr) -tensor([0.4942, 0.3481, 0.8504, ..., 0.3527, 0.9112, 0.6069]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([150000, 150000]) -Rows: 150000 -Size: 22500000000 -NNZ: 1799954 -Density: 7.999795555555555e-05 -Time: 10.491406202316284 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_0.0001.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_0.0001.json deleted file mode 100644 index 57b41e5..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3999806, "MATRIX_DENSITY": 9.999515e-05, "TIME_S": 11.476663827896118, "TIME_S_1KI": 11.476663827896118, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 620.7309038543701, "W": 53.79, "J_1KI": 620.7309038543701, "W_1KI": 53.79, "W_D": 36.455, "J_D": 420.68683956146236, "W_D_1KI": 36.455, "J_D_1KI": 36.455} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_0.0001.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_0.0001.output deleted file mode 100644 index c25ccaa..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_0.0001.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 19, 43, ..., 3999771, - 3999789, 3999806]), - col_indices=tensor([ 6959, 8867, 19111, ..., 154179, 160638, - 186229]), - values=tensor([-0.4907, 1.2405, -0.7131, ..., 0.6578, 0.2708, - -1.9861]), size=(200000, 200000), nnz=3999806, - layout=torch.sparse_csr) -tensor([0.0492, 0.2725, 0.6463, ..., 0.6373, 0.0160, 0.1028]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3999806 -Density: 9.999515e-05 -Time: 11.476663827896118 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_1e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_1e-05.json deleted file mode 100644 index 6920104..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4408, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 399997, "MATRIX_DENSITY": 9.999925e-06, "TIME_S": 10.44572901725769, "TIME_S_1KI": 2.369720738942307, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 558.8137521886825, "W": 53.339999999999996, "J_1KI": 126.7726298068699, "W_1KI": 12.100725952813066, "W_D": 36.5005, "J_D": 382.3956010829211, "W_D_1KI": 8.28051270417423, "J_D_1KI": 1.8785192160104875} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_1e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_1e-05.output deleted file mode 100644 index 82075a7..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_1e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 399992, 399995, - 399997]), - col_indices=tensor([ 46773, 109226, 3978, ..., 177327, 107515, - 186616]), - values=tensor([ 0.1343, 0.3740, -0.5352, ..., 1.1270, -0.0206, - -0.1296]), size=(200000, 200000), nnz=399997, - layout=torch.sparse_csr) -tensor([0.4514, 0.6256, 0.0469, ..., 0.9495, 0.3371, 0.2048]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 399997 -Density: 9.999925e-06 -Time: 10.44572901725769 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_2e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_2e-05.json deleted file mode 100644 index 190e417..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3132, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 799993, "MATRIX_DENSITY": 1.9999825e-05, "TIME_S": 10.525863409042358, "TIME_S_1KI": 3.360748214892196, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 561.931005191803, "W": 53.67, "J_1KI": 179.41602975472637, "W_1KI": 17.1360153256705, "W_D": 36.60175, "J_D": 383.22448610544205, "W_D_1KI": 11.68638250319285, "J_D_1KI": 3.731284324135648} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_2e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_2e-05.output deleted file mode 100644 index 899a345..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_2e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 8, ..., 799986, 799990, - 799993]), - col_indices=tensor([69432, 79582, 87430, ..., 10270, 12760, 81192]), - values=tensor([-0.1311, 0.9659, -0.3616, ..., 0.1639, 0.6571, - -1.2217]), size=(200000, 200000), nnz=799993, - layout=torch.sparse_csr) -tensor([0.6434, 0.4090, 0.6675, ..., 0.0404, 0.9010, 0.8857]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 799993 -Density: 1.9999825e-05 -Time: 10.525863409042358 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_5e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_5e-05.json deleted file mode 100644 index 002132c..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1651, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 1999950, "MATRIX_DENSITY": 4.999875e-05, "TIME_S": 10.553916454315186, "TIME_S_1KI": 6.392438797283577, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 564.8515831375122, "W": 53.339999999999996, "J_1KI": 342.12694314809943, "W_1KI": 32.30769230769231, "W_D": 36.40625, "J_D": 385.52920788526535, "W_D_1KI": 22.051029678982434, "J_D_1KI": 13.356165765585969} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_5e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_5e-05.output deleted file mode 100644 index d22e057..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_5e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 8, ..., 1999936, - 1999944, 1999950]), - col_indices=tensor([ 39319, 160273, 4099, ..., 121354, 180545, - 197573]), - values=tensor([ 1.4943, -1.7458, 1.6707, ..., -0.6169, -0.1147, - 0.6789]), size=(200000, 200000), nnz=1999950, - layout=torch.sparse_csr) -tensor([0.2955, 0.8940, 0.6701, ..., 0.8850, 0.8819, 0.4006]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 1999950 -Density: 4.999875e-05 -Time: 10.553916454315186 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_8e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_8e-05.json deleted file mode 100644 index b9579c1..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1107, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [200000, 200000], "MATRIX_ROWS": 200000, "MATRIX_SIZE": 40000000000, "MATRIX_NNZ": 3199885, "MATRIX_DENSITY": 7.9997125e-05, "TIME_S": 10.475299596786499, "TIME_S_1KI": 9.462781930249774, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 550.9257331466675, "W": 53.56, "J_1KI": 497.6745556880466, "W_1KI": 48.383017163504974, "W_D": 36.59375, "J_D": 376.4084866940975, "W_D_1KI": 33.056684733514004, "J_D_1KI": 29.86150382431256} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_8e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_8e-05.output deleted file mode 100644 index c213ccf..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_200000_8e-05.output +++ /dev/null @@ -1,19 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 39, ..., 3199856, - 3199874, 3199885]), - col_indices=tensor([ 294, 12330, 32695, ..., 112063, 148152, - 177118]), - values=tensor([-1.3043, 0.7104, -0.2564, ..., 0.6246, 2.1306, - -0.1928]), size=(200000, 200000), nnz=3199885, - layout=torch.sparse_csr) -tensor([0.5180, 0.3001, 0.9087, ..., 0.3061, 0.2080, 0.9389]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([200000, 200000]) -Rows: 200000 -Size: 40000000000 -NNZ: 3199885 -Density: 7.9997125e-05 -Time: 10.475299596786499 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_0.0001.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_0.0001.json deleted file mode 100644 index 480d529..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 44273, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 39999, "MATRIX_DENSITY": 9.99975e-05, "TIME_S": 10.459089040756226, "TIME_S_1KI": 0.23624080231193337, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 557.0045497059822, "W": 52.69, "J_1KI": 12.581134093148922, "W_1KI": 1.190115871976148, "W_D": 35.266, "J_D": 372.80930821657176, "W_D_1KI": 0.7965577214103403, "J_D_1KI": 0.01799195268923137} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_0.0001.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_0.0001.output deleted file mode 100644 index 7275a8b..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_0.0001.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 39995, 39997, 39999]), - col_indices=tensor([ 7012, 11968, 9053, ..., 6971, 15593, 16542]), - values=tensor([-2.1502, -1.9705, 0.1480, ..., 0.8229, 0.6247, - 0.7831]), size=(20000, 20000), nnz=39999, - layout=torch.sparse_csr) -tensor([0.2005, 0.9207, 0.3531, ..., 0.6340, 0.4714, 0.5186]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 39999 -Density: 9.99975e-05 -Time: 10.459089040756226 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_1e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_1e-05.json deleted file mode 100644 index b787feb..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 142974, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 4000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.4083993434906, "TIME_S_1KI": 0.07279924562151581, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 541.0924538183212, "W": 52.02, "J_1KI": 3.784551413671865, "W_1KI": 0.3638423769356667, "W_D": 35.168000000000006, "J_D": 365.80429480743413, "W_D_1KI": 0.24597479261963717, "J_D_1KI": 0.0017204162478467214} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_1e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_1e-05.output deleted file mode 100644 index d13bc0e..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_1e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4000, 4000, 4000]), - col_indices=tensor([ 5231, 5938, 19459, ..., 13633, 3949, 16414]), - values=tensor([-1.6748, -0.1947, 0.7756, ..., 0.2577, 0.1580, - -0.8287]), size=(20000, 20000), nnz=4000, - layout=torch.sparse_csr) -tensor([0.2630, 0.2472, 0.4027, ..., 0.4730, 0.0575, 0.8195]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 4000 -Density: 1e-05 -Time: 10.4083993434906 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_2e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_2e-05.json deleted file mode 100644 index 89ed33c..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 92500, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 7999, "MATRIX_DENSITY": 1.99975e-05, "TIME_S": 10.679702520370483, "TIME_S_1KI": 0.11545624346346468, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 553.4974876880646, "W": 52.15, "J_1KI": 5.9837566236547515, "W_1KI": 0.5637837837837838, "W_D": 35.262249999999995, "J_D": 374.25823173975937, "W_D_1KI": 0.38121351351351346, "J_D_1KI": 0.004121227173119065} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_2e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_2e-05.output deleted file mode 100644 index 9400515..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 7998, 7999, 7999]), - col_indices=tensor([14192, 2507, 4894, ..., 19237, 17104, 15211]), - values=tensor([-0.4109, -0.3991, 0.2568, ..., -0.0663, 0.8781, - 0.9772]), size=(20000, 20000), nnz=7999, - layout=torch.sparse_csr) -tensor([0.7904, 0.1579, 0.6812, ..., 0.1088, 0.7878, 0.1293]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 7999 -Density: 1.99975e-05 -Time: 10.679702520370483 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_5e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_5e-05.json deleted file mode 100644 index fabd67a..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 58390, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 20000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.614689350128174, "TIME_S_1KI": 0.18178950762336316, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 549.0197164678574, "W": 52.38, "J_1KI": 9.402632582083532, "W_1KI": 0.8970714163384143, "W_D": 35.41074999999999, "J_D": 371.1569286925196, "W_D_1KI": 0.6064523034766226, "J_D_1KI": 0.010386235716331951} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_5e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_5e-05.output deleted file mode 100644 index 35ee076..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_5e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 19999, 20000, 20000]), - col_indices=tensor([ 8980, 6120, 19029, ..., 9205, 16103, 19135]), - values=tensor([-0.4641, 0.0109, -1.1563, ..., 0.2696, -0.1796, - 0.1773]), size=(20000, 20000), nnz=20000, - layout=torch.sparse_csr) -tensor([0.9296, 0.2994, 0.2699, ..., 0.3241, 0.6988, 0.7273]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 20000 -Density: 5e-05 -Time: 10.614689350128174 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_8e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_8e-05.json deleted file mode 100644 index fab4885..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 48997, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [20000, 20000], "MATRIX_ROWS": 20000, "MATRIX_SIZE": 400000000, "MATRIX_NNZ": 31997, "MATRIX_DENSITY": 7.99925e-05, "TIME_S": 10.585016965866089, "TIME_S_1KI": 0.2160339809756942, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 550.5725529026985, "W": 52.47, "J_1KI": 11.23686252020937, "W_1KI": 1.0708818907280038, "W_D": 35.204, "J_D": 369.39882127666476, "W_D_1KI": 0.7184929689572832, "J_D_1KI": 0.014664019612573896} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_8e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_8e-05.output deleted file mode 100644 index 0cee4bd..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_20000_8e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 31992, 31995, 31997]), - col_indices=tensor([12809, 16448, 4325, ..., 16885, 9633, 16568]), - values=tensor([-0.3122, -1.3985, -0.4105, ..., -1.1355, -0.3244, - 0.4583]), size=(20000, 20000), nnz=31997, - layout=torch.sparse_csr) -tensor([0.1517, 0.9693, 0.8291, ..., 0.9071, 0.8951, 0.7880]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([20000, 20000]) -Rows: 20000 -Size: 400000000 -NNZ: 31997 -Density: 7.99925e-05 -Time: 10.585016965866089 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_0.0001.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_0.0001.json deleted file mode 100644 index 888ad11..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11300, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 0.0001, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 249989, "MATRIX_DENSITY": 9.99956e-05, "TIME_S": 10.371392965316772, "TIME_S_1KI": 0.9178223863112188, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 552.8495553016662, "W": 53.55, "J_1KI": 48.9247394072271, "W_1KI": 4.738938053097345, "W_D": 36.521249999999995, "J_D": 377.04494531393044, "W_D_1KI": 3.231969026548672, "J_D_1KI": 0.2860149581016524} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_0.0001.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_0.0001.output deleted file mode 100644 index 7ccd09b..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_0.0001.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 249981, 249987, - 249989]), - col_indices=tensor([ 7489, 8418, 40706, ..., 22187, 32578, 41156]), - values=tensor([ 0.2037, -0.4745, 1.0653, ..., 0.4878, 1.2850, - 0.4272]), size=(50000, 50000), nnz=249989, - layout=torch.sparse_csr) -tensor([0.5254, 0.8529, 0.0946, ..., 0.9952, 0.1590, 0.6374]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 249989 -Density: 9.99956e-05 -Time: 10.371392965316772 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_1e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_1e-05.json deleted file mode 100644 index 1b47b99..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 27052, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 1e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.098474740982056, "TIME_S_1KI": 0.37329863747530884, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 534.6541584348679, "W": 52.59, "J_1KI": 19.76394197970087, "W_1KI": 1.9440337128493272, "W_D": 35.43, "J_D": 360.19769601345064, "W_D_1KI": 1.3096998373502884, "J_D_1KI": 0.048414159298768605} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_1e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_1e-05.output deleted file mode 100644 index 9721bdc..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_1e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24999, 25000]), - col_indices=tensor([19630, 49584, 10455, ..., 49313, 43089, 38924]), - values=tensor([ 0.2251, -0.2284, -1.3832, ..., 0.1678, 0.5915, - 0.1257]), size=(50000, 50000), nnz=25000, - layout=torch.sparse_csr) -tensor([0.5666, 0.3218, 0.4428, ..., 0.4071, 0.9754, 0.9097]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.098474740982056 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_2e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_2e-05.json deleted file mode 100644 index ad98e21..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_2e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 20543, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 2e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 49998, "MATRIX_DENSITY": 1.99992e-05, "TIME_S": 10.167993307113647, "TIME_S_1KI": 0.49496146167130634, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 535.5479633426665, "W": 52.72999999999999, "J_1KI": 26.069608301741056, "W_1KI": 2.566811079199727, "W_D": 35.55599999999999, "J_D": 361.1216268653869, "W_D_1KI": 1.7308085479238666, "J_D_1KI": 0.08425295954455857} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_2e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_2e-05.output deleted file mode 100644 index 653b0b6..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_2e-05.output +++ /dev/null @@ -1,17 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 49996, 49998, 49998]), - col_indices=tensor([ 3791, 35461, 34988, ..., 44069, 11715, 19289]), - values=tensor([-0.0727, 1.0160, -0.4591, ..., -0.3445, -0.2380, - -0.2267]), size=(50000, 50000), nnz=49998, - layout=torch.sparse_csr) -tensor([0.2072, 0.8480, 0.6134, ..., 0.7630, 0.6776, 0.2121]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 49998 -Density: 1.99992e-05 -Time: 10.167993307113647 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_5e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_5e-05.json deleted file mode 100644 index c2575aa..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_5e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 14210, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 5e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 124992, "MATRIX_DENSITY": 4.99968e-05, "TIME_S": 10.279333591461182, "TIME_S_1KI": 0.7233873041140874, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 542.236291847229, "W": 52.790000000000006, "J_1KI": 38.158781973767, "W_1KI": 3.714989444053484, "W_D": 35.93325000000001, "J_D": 369.09096863079077, "W_D_1KI": 2.5287297677691774, "J_D_1KI": 0.1779542412223207} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_5e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_5e-05.output deleted file mode 100644 index 485b57f..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_5e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 9, ..., 124990, 124991, - 124992]), - col_indices=tensor([ 4851, 8352, 16860, ..., 45746, 14693, 10153]), - values=tensor([-0.9105, 1.3874, 0.6354, ..., 0.2352, -0.8611, - 0.7932]), size=(50000, 50000), nnz=124992, - layout=torch.sparse_csr) -tensor([0.5060, 0.3493, 0.8343, ..., 0.7032, 0.7918, 0.4184]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 124992 -Density: 4.99968e-05 -Time: 10.279333591461182 seconds - diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_8e-05.json b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_8e-05.json deleted file mode 100644 index d8184c9..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_8e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11477, "MATRIX_TYPE": "synthetic", "MATRIX_DENSITY_GROUP": 8e-05, "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 199985, "MATRIX_DENSITY": 7.9994e-05, "TIME_S": 10.294831275939941, "TIME_S_1KI": 0.8969967130730976, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 546.0525859498978, "W": 53.06, "J_1KI": 47.57798953994056, "W_1KI": 4.623159362202666, "W_D": 35.64775, "J_D": 366.8591419298053, "W_D_1KI": 3.106016380587262, "J_D_1KI": 0.2706296402010336} diff --git a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_8e-05.output b/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_8e-05.output deleted file mode 100644 index cc5ade2..0000000 --- a/pytorch/output_1core_before_test/xeon_4216_10_10_10_50000_8e-05.output +++ /dev/null @@ -1,18 +0,0 @@ -/nfshomes/vut/ampere_research/pytorch/spmv.py:62: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 199973, 199976, - 199985]), - col_indices=tensor([25311, 37000, 2121, ..., 38652, 39422, 44717]), - values=tensor([ 0.6524, 0.5157, -0.9218, ..., 1.3667, -0.3124, - -1.4156]), size=(50000, 50000), nnz=199985, - layout=torch.sparse_csr) -tensor([0.8871, 0.1464, 0.6273, ..., 0.9688, 0.2760, 0.6852]) -Matrix Type: synthetic -Matrix: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 199985 -Density: 7.9994e-05 -Time: 10.294831275939941 seconds - diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_005.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_005.json new file mode 100644 index 0000000..0da2f0e --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_005.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 12209, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 21.80563497543335, "TIME_S_1KI": 1.7860295663390409, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 562.0210906982422, "W": 22.458640207331342, "J_1KI": 46.03334349236156, "W_1KI": 1.839515128784613, "W_D": 4.165640207331343, "J_D": 104.24396273183828, "W_D_1KI": 0.3411942179811076, "J_D_1KI": 0.027946123186264854} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_005.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_005.output new file mode 100644 index 0000000..053b7bd --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_005.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_005.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 1.837904691696167} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.8303, 0.7531, 0.1625, ..., 0.4989, 0.4484, 0.5609]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 1.837904691696167 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 11426 -m matrices/as-caida_pruned/as-caida_G_005.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 19.65278959274292} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.7847, 0.6846, 0.6702, ..., 0.1229, 0.5020, 0.8229]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 19.65278959274292 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 12209 -m matrices/as-caida_pruned/as-caida_G_005.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 21.80563497543335} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.1222, 0.0255, 0.4770, ..., 0.2934, 0.1339, 0.8386]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 21.80563497543335 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.1222, 0.0255, 0.4770, ..., 0.2934, 0.1339, 0.8386]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 21.80563497543335 seconds + +[20.68, 20.84, 20.8, 20.8, 20.72, 20.4, 20.36, 20.56, 20.56, 20.72] +[21.16, 21.36, 21.2, 24.8, 25.44, 26.84, 27.52, 25.48, 25.0, 24.24, 24.12, 24.16, 24.48, 24.52, 24.48, 24.68, 24.68, 24.48, 24.32, 24.16, 24.16, 24.24, 24.32, 24.32] +25.024715900421143 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 12209, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_005', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 70026, 'MATRIX_DENSITY': 7.111825976492498e-05, 'TIME_S': 21.80563497543335, 'TIME_S_1KI': 1.7860295663390409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 562.0210906982422, 'W': 22.458640207331342} +[20.68, 20.84, 20.8, 20.8, 20.72, 20.4, 20.36, 20.56, 20.56, 20.72, 20.32, 20.04, 19.88, 19.96, 19.76, 19.96, 20.0, 20.12, 20.16, 20.16] +365.86 +18.293 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 12209, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_005', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 70026, 'MATRIX_DENSITY': 7.111825976492498e-05, 'TIME_S': 21.80563497543335, 'TIME_S_1KI': 1.7860295663390409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 562.0210906982422, 'W': 22.458640207331342, 'J_1KI': 46.03334349236156, 'W_1KI': 1.839515128784613, 'W_D': 4.165640207331343, 'J_D': 104.24396273183828, 'W_D_1KI': 0.3411942179811076, 'J_D_1KI': 0.027946123186264854} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_010.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_010.json new file mode 100644 index 0000000..2fc09a0 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_010.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 11376, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 20.835818767547607, "TIME_S_1KI": 1.8315593150094593, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 566.7462919044493, "W": 22.650686285788208, "J_1KI": 49.81947010411826, "W_1KI": 1.9910940827872898, "W_D": 4.273686285788209, "J_D": 106.93255933499329, "W_D_1KI": 0.375675658033422, "J_D_1KI": 0.03302352830814188} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_010.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_010.output new file mode 100644 index 0000000..388d100 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_010.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 
'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_010.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 1.845872163772583} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.4293, 0.8542, 0.4763, ..., 0.7307, 0.0291, 0.7713]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 1.845872163772583 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 11376 -m matrices/as-caida_pruned/as-caida_G_010.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 20.835818767547607} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.4258, 0.8800, 0.2129, ..., 0.8885, 0.1727, 0.3221]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 20.835818767547607 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.4258, 0.8800, 0.2129, ..., 0.8885, 0.1727, 0.3221]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 20.835818767547607 seconds + +[20.52, 20.28, 20.24, 20.36, 20.44, 20.4, 20.4, 20.64, 20.44, 20.36] +[20.2, 20.72, 20.96, 23.56, 25.32, 26.4, 27.0, 27.32, 25.76, 24.92, 24.8, 24.56, 24.56, 24.52, 24.64, 24.44, 24.36, 24.64, 25.0, 25.12, 25.24, 25.16, 24.68, 24.88] +25.021153211593628 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 11376, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 20.835818767547607, 'TIME_S_1KI': 1.8315593150094593, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 566.7462919044493, 'W': 22.650686285788208} +[20.52, 20.28, 20.24, 20.36, 20.44, 20.4, 20.4, 20.64, 20.44, 20.36, 20.32, 20.28, 20.44, 20.28, 20.56, 20.4, 20.4, 20.52, 20.6, 20.52] +367.53999999999996 +18.377 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 11376, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 20.835818767547607, 'TIME_S_1KI': 1.8315593150094593, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 566.7462919044493, 'W': 22.650686285788208, 'J_1KI': 49.81947010411826, 'W_1KI': 1.9910940827872898, 'W_D': 4.273686285788209, 'J_D': 106.93255933499329, 'W_D_1KI': 0.375675658033422, 'J_D_1KI': 0.03302352830814188} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_015.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_015.json new file mode 100644 index 0000000..c226993 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_015.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 11107, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 20.96405267715454, "TIME_S_1KI": 1.8874631022917567, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 578.1019878292084, "W": 23.091597044985463, "J_1KI": 52.04843682625447, "W_1KI": 2.0790129688471652, "W_D": 4.761597044985461, "J_D": 119.20737710714337, "W_D_1KI": 0.42870235391964173, "J_D_1KI": 0.038597492925150065} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_015.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_015.output new file mode 100644 index 0000000..a239af9 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_015.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_015.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 1.890571117401123} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.9144, 0.6462, 0.7780, ..., 0.4675, 0.5413, 0.7566]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 1.890571117401123 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 11107 -m matrices/as-caida_pruned/as-caida_G_015.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 20.96405267715454} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.2503, 0.7657, 0.6477, ..., 0.2679, 0.8591, 0.3889]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 20.96405267715454 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.2503, 0.7657, 0.6477, ..., 0.2679, 0.8591, 0.3889]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 20.96405267715454 seconds + +[20.44, 20.48, 20.12, 20.2, 20.32, 20.36, 20.36, 20.56, 20.56, 20.64] +[20.64, 20.76, 20.96, 25.32, 27.08, 28.0, 28.84, 26.6, 25.84, 24.76, 24.88, 25.28, 25.24, 25.24, 25.0, 25.24, 25.12, 25.0, 25.12, 25.0, 24.92, 25.08, 25.08, 25.08] +25.03516697883606 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 11107, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_015', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 77124, 'MATRIX_DENSITY': 7.832697378273889e-05, 'TIME_S': 20.96405267715454, 'TIME_S_1KI': 1.8874631022917567, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 578.1019878292084, 'W': 23.091597044985463} +[20.44, 20.48, 20.12, 20.2, 20.32, 20.36, 20.36, 20.56, 20.56, 20.64, 19.92, 19.8, 20.08, 20.16, 20.48, 20.6, 20.64, 20.64, 20.52, 20.44] +366.6 +18.330000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 11107, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_015', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 77124, 'MATRIX_DENSITY': 7.832697378273889e-05, 'TIME_S': 20.96405267715454, 'TIME_S_1KI': 1.8874631022917567, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 578.1019878292084, 'W': 23.091597044985463, 'J_1KI': 52.04843682625447, 'W_1KI': 2.0790129688471652, 'W_D': 4.761597044985461, 'J_D': 119.20737710714337, 'W_D_1KI': 0.42870235391964173, 'J_D_1KI': 0.038597492925150065} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_020.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_020.json new file mode 100644 index 0000000..cc613e9 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_020.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10707, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 20.933836698532104, "TIME_S_1KI": 1.9551542634287946, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 568.068498840332, "W": 22.70139850521993, "J_1KI": 53.05580450549473, "W_1KI": 2.120238956310818, "W_D": 4.667398505219932, "J_D": 116.79465746307376, "W_D_1KI": 0.4359202862818653, "J_D_1KI": 0.040713578619768875} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_020.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_020.output new file mode 100644 index 0000000..2f2e63d --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_020.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_020.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 2.106640338897705} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.5035, 0.4030, 0.8560, ..., 0.0334, 0.8971, 0.2378]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 2.106640338897705 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9968 -m matrices/as-caida_pruned/as-caida_G_020.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 19.549769401550293} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.4258, 0.1400, 0.0055, ..., 0.5207, 0.0063, 0.5813]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 19.549769401550293 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 10707 -m matrices/as-caida_pruned/as-caida_G_020.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 20.933836698532104} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.7953, 0.4027, 0.0650, ..., 0.1770, 0.5959, 0.3923]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 20.933836698532104 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.7953, 0.4027, 0.0650, ..., 0.1770, 0.5959, 0.3923]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 20.933836698532104 seconds + +[20.04, 20.2, 20.08, 20.0, 20.08, 19.52, 19.52, 19.64, 19.64, 19.84] +[20.0, 20.24, 20.84, 22.24, 24.28, 25.28, 26.6, 26.24, 26.08, 25.08, 25.28, 25.44, 25.12, 25.12, 25.52, 25.76, 25.4, 25.24, 25.52, 25.04, 24.88, 24.84, 24.92, 25.04] +25.023502349853516 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10707, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_020', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 80948, 'MATRIX_DENSITY': 8.221062021893506e-05, 'TIME_S': 20.933836698532104, 'TIME_S_1KI': 1.9551542634287946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 568.068498840332, 'W': 22.70139850521993} +[20.04, 20.2, 20.08, 20.0, 20.08, 19.52, 19.52, 19.64, 19.64, 19.84, 19.84, 19.88, 19.76, 19.92, 20.24, 20.32, 20.64, 20.64, 20.48, 20.52] +360.68 +18.034 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10707, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_020', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 80948, 'MATRIX_DENSITY': 8.221062021893506e-05, 'TIME_S': 20.933836698532104, 'TIME_S_1KI': 1.9551542634287946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 568.068498840332, 'W': 22.70139850521993, 'J_1KI': 53.05580450549473, 'W_1KI': 2.120238956310818, 'W_D': 4.667398505219932, 'J_D': 116.79465746307376, 'W_D_1KI': 0.4359202862818653, 'J_D_1KI': 0.040713578619768875} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_025.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_025.json new file mode 100644 index 0000000..dc3725c --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_025.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9916, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, 
"TIME_S": 21.444532871246338, "TIME_S_1KI": 2.162619289153523, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 542.0539724731443, "W": 22.567435567135146, "J_1KI": 54.6645797169367, "W_1KI": 2.275860787327062, "W_D": 4.178435567135143, "J_D": 100.36309137344335, "W_D_1KI": 0.4213831753867631, "J_D_1KI": 0.0424952778728079} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_025.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_025.output new file mode 100644 index 0000000..831a4fa --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_025.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_025.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 2.117697238922119} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.8957, 0.2028, 0.9403, ..., 0.7663, 0.1318, 0.0065]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 2.117697238922119 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9916 -m matrices/as-caida_pruned/as-caida_G_025.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 21.444532871246338} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.3063, 0.4853, 0.6327, ..., 0.6312, 0.8565, 0.4336]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 21.444532871246338 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.3063, 0.4853, 0.6327, ..., 0.6312, 0.8565, 0.4336]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 21.444532871246338 seconds + +[20.36, 20.36, 20.4, 20.8, 20.8, 20.76, 20.72, 20.44, 20.4, 20.36] +[20.28, 20.16, 21.36, 23.64, 25.36, 25.36, 26.76, 27.36, 25.96, 25.8, 24.64, 24.84, 25.08, 24.8, 25.0, 25.0, 24.92, 24.64, 24.76, 24.68, 24.6, 24.44, 24.56] +24.019298553466797 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9916, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_025', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 85850, 'MATRIX_DENSITY': 8.718908121010495e-05, 'TIME_S': 21.444532871246338, 'TIME_S_1KI': 2.162619289153523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 542.0539724731443, 'W': 22.567435567135146} +[20.36, 20.36, 20.4, 20.8, 20.8, 20.76, 20.72, 20.44, 20.4, 20.36, 20.44, 20.48, 20.48, 20.48, 20.2, 20.0, 20.2, 20.24, 20.28, 20.32] +367.78000000000003 +18.389000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9916, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_025', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 85850, 'MATRIX_DENSITY': 8.718908121010495e-05, 'TIME_S': 21.444532871246338, 'TIME_S_1KI': 2.162619289153523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 542.0539724731443, 'W': 22.567435567135146, 'J_1KI': 54.6645797169367, 'W_1KI': 2.275860787327062, 'W_D': 4.178435567135143, 'J_D': 100.36309137344335, 'W_D_1KI': 0.4213831753867631, 'J_D_1KI': 0.0424952778728079} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_030.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_030.json new file mode 100644 index 0000000..a81eb61 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_030.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10039, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 
8.820467912752026e-05, "TIME_S": 21.360346794128418, "TIME_S_1KI": 2.1277365070354035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 598.2952175140381, "W": 23.788110794418536, "J_1KI": 59.59709308835921, "W_1KI": 2.36956975738804, "W_D": 4.762110794418536, "J_D": 119.77193725872041, "W_D_1KI": 0.4743610712639243, "J_D_1KI": 0.047251825008857884} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_030.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_030.output new file mode 100644 index 0000000..f886974 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_030.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_030.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 2.0916619300842285} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.1472, 0.3825, 0.3283, ..., 0.1400, 0.8130, 0.7912]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 2.0916619300842285 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 10039 -m matrices/as-caida_pruned/as-caida_G_030.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 21.360346794128418} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.4196, 0.6396, 0.6935, ..., 0.0985, 0.8486, 0.7321]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 21.360346794128418 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.4196, 0.6396, 0.6935, ..., 0.0985, 0.8486, 0.7321]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 21.360346794128418 seconds + +[20.2, 20.52, 20.2, 20.16, 20.16, 20.32, 20.2, 20.04, 20.28, 20.68] +[21.0, 21.6, 22.24, 26.64, 28.16, 29.0, 29.76, 27.64, 26.2, 25.24, 25.24, 24.96, 25.16, 25.08, 25.44, 25.8, 26.0, 26.08, 25.96, 25.88, 25.96, 26.04, 26.32, 26.72] +25.151018619537354 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10039, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_030', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 86850, 'MATRIX_DENSITY': 8.820467912752026e-05, 'TIME_S': 21.360346794128418, 'TIME_S_1KI': 2.1277365070354035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 598.2952175140381, 'W': 23.788110794418536} +[20.2, 20.52, 20.2, 20.16, 20.16, 20.32, 20.2, 20.04, 20.28, 20.68, 23.0, 23.0, 22.84, 22.68, 22.24, 21.76, 21.76, 21.36, 20.76, 20.6] +380.52 +19.026 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10039, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_030', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 86850, 'MATRIX_DENSITY': 8.820467912752026e-05, 'TIME_S': 21.360346794128418, 'TIME_S_1KI': 2.1277365070354035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 598.2952175140381, 'W': 23.788110794418536, 'J_1KI': 59.59709308835921, 'W_1KI': 2.36956975738804, 'W_D': 4.762110794418536, 'J_D': 119.77193725872041, 'W_D_1KI': 0.4743610712639243, 'J_D_1KI': 0.047251825008857884} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_035.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_035.json new file mode 100644 index 0000000..0c2d9e7 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_035.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9677, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 
20.692888498306274, "TIME_S_1KI": 2.1383578069966185, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 539.5648220634461, "W": 22.45116273429364, "J_1KI": 55.75744776929276, "W_1KI": 2.320054018217799, "W_D": 3.9901627342936408, "J_D": 95.89487507677086, "W_D_1KI": 0.4123346837133038, "J_D_1KI": 0.0426097637401368} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_035.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_035.output new file mode 100644 index 0000000..6db68b8 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_035.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_035.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 2.1698832511901855} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.2348, 0.6582, 0.5673, ..., 0.6924, 0.1866, 0.1527]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 2.1698832511901855 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9677 -m matrices/as-caida_pruned/as-caida_G_035.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 20.692888498306274} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.8919, 0.0278, 0.6159, ..., 0.5785, 0.0363, 0.3275]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 20.692888498306274 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.8919, 0.0278, 0.6159, ..., 0.5785, 0.0363, 0.3275]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 20.692888498306274 seconds + +[20.4, 20.28, 20.4, 20.4, 20.48, 20.48, 20.4, 20.56, 20.52, 20.32] +[20.36, 20.4, 20.44, 23.08, 25.0, 26.08, 27.04, 26.96, 25.56, 24.8, 24.88, 24.84, 24.84, 24.76, 24.8, 24.68, 24.36, 24.72, 24.92, 24.52, 24.84, 24.8, 24.52] +24.032823085784912 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9677, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_035', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 87560, 'MATRIX_DENSITY': 8.892575364888514e-05, 'TIME_S': 20.692888498306274, 'TIME_S_1KI': 2.1383578069966185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 539.5648220634461, 'W': 22.45116273429364} +[20.4, 20.28, 20.4, 20.4, 20.48, 20.48, 20.4, 20.56, 20.52, 20.32, 20.48, 20.72, 20.48, 20.32, 20.56, 20.6, 20.72, 20.72, 20.56, 20.84] +369.21999999999997 +18.461 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9677, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_035', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 87560, 'MATRIX_DENSITY': 8.892575364888514e-05, 'TIME_S': 20.692888498306274, 'TIME_S_1KI': 2.1383578069966185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 539.5648220634461, 'W': 22.45116273429364, 'J_1KI': 55.75744776929276, 'W_1KI': 2.320054018217799, 'W_D': 3.9901627342936408, 'J_D': 95.89487507677086, 'W_D_1KI': 0.4123346837133038, 'J_D_1KI': 0.0426097637401368} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_040.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_040.json new file mode 100644 index 0000000..c731828 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_040.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9728, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 
21.267418146133423, "TIME_S_1KI": 2.1862066350877285, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 581.2839068508149, "W": 23.221852596012134, "J_1KI": 59.7536910825262, "W_1KI": 2.3871147816624316, "W_D": 4.965852596012134, "J_D": 124.30404447364818, "W_D_1KI": 0.5104700448203262, "J_D_1KI": 0.05247430559419472} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_040.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_040.output new file mode 100644 index 0000000..6f76b33 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_040.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_040.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 2.1586642265319824} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.4394, 0.0545, 0.4200, ..., 0.1476, 0.7961, 0.2520]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 2.1586642265319824 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9728 -m matrices/as-caida_pruned/as-caida_G_040.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 21.267418146133423} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.5888, 0.7637, 0.7439, ..., 0.7386, 0.5844, 0.5965]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 21.267418146133423 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.5888, 0.7637, 0.7439, ..., 0.7386, 0.5844, 0.5965]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 21.267418146133423 seconds + +[20.28, 20.24, 20.12, 20.52, 20.52, 20.64, 20.64, 20.52, 20.64, 20.36] +[20.4, 20.44, 21.0, 25.16, 27.08, 28.08, 29.24, 26.64, 26.24, 26.24, 25.4, 25.76, 26.08, 26.12, 25.92, 25.84, 25.28, 24.92, 24.8, 24.52, 24.48, 24.4, 24.32, 24.68] +25.031762838363647 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9728, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_040', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89658, 'MATRIX_DENSITY': 9.105647807962247e-05, 'TIME_S': 21.267418146133423, 'TIME_S_1KI': 2.1862066350877285, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 581.2839068508149, 'W': 23.221852596012134} +[20.28, 20.24, 20.12, 20.52, 20.52, 20.64, 20.64, 20.52, 20.64, 20.36, 19.76, 19.92, 19.88, 20.04, 20.2, 20.2, 20.12, 20.28, 20.28, 20.32] +365.12 +18.256 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9728, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_040', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89658, 'MATRIX_DENSITY': 9.105647807962247e-05, 'TIME_S': 21.267418146133423, 'TIME_S_1KI': 2.1862066350877285, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 581.2839068508149, 'W': 23.221852596012134, 'J_1KI': 59.7536910825262, 'W_1KI': 2.3871147816624316, 'W_D': 4.965852596012134, 'J_D': 124.30404447364818, 'W_D_1KI': 0.5104700448203262, 'J_D_1KI': 0.05247430559419472} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_045.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_045.json new file mode 100644 index 0000000..828d7d6 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_045.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9625, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 
20.659363985061646, "TIME_S_1KI": 2.146427427019392, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 568.0362667846679, "W": 22.719405314768107, "J_1KI": 59.01675499061484, "W_1KI": 2.3604576950408425, "W_D": 4.469405314768107, "J_D": 111.74519203186027, "W_D_1KI": 0.46435379893694617, "J_D_1KI": 0.0482445505389035} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_045.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_045.output new file mode 100644 index 0000000..73f02ab --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_045.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_045.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 2.181617021560669} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.0120, 0.7096, 0.1498, ..., 0.6953, 0.2603, 0.3548]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 2.181617021560669 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9625 -m matrices/as-caida_pruned/as-caida_G_045.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 20.659363985061646} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.9537, 0.8976, 0.1574, ..., 0.4725, 0.7538, 0.1084]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 20.659363985061646 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.9537, 0.8976, 0.1574, ..., 0.4725, 0.7538, 0.1084]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 20.659363985061646 seconds + +[20.36, 20.04, 20.24, 20.68, 20.88, 20.64, 20.72, 20.72, 20.52, 20.4] +[20.4, 20.52, 20.52, 21.52, 23.0, 24.28, 25.52, 26.04, 26.08, 25.56, 25.52, 25.4, 25.4, 25.56, 25.56, 25.36, 25.68, 25.44, 25.88, 26.08, 25.52, 25.56, 25.28, 25.0] +25.00225067138672 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9625, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_045', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89152, 'MATRIX_DENSITY': 9.054258553341032e-05, 'TIME_S': 20.659363985061646, 'TIME_S_1KI': 2.146427427019392, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 568.0362667846679, 'W': 22.719405314768107} +[20.36, 20.04, 20.24, 20.68, 20.88, 20.64, 20.72, 20.72, 20.52, 20.4, 20.0, 20.12, 20.0, 19.84, 20.12, 20.08, 19.96, 20.04, 20.04, 19.96] +365.0 +18.25 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9625, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_045', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89152, 'MATRIX_DENSITY': 9.054258553341032e-05, 'TIME_S': 20.659363985061646, 'TIME_S_1KI': 2.146427427019392, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 568.0362667846679, 'W': 22.719405314768107, 'J_1KI': 59.01675499061484, 'W_1KI': 2.3604576950408425, 'W_D': 4.469405314768107, 'J_D': 111.74519203186027, 'W_D_1KI': 0.46435379893694617, 'J_D_1KI': 0.0482445505389035} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_050.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_050.json new file mode 100644 index 0000000..d71d4cf --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_050.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9615, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 
21.49118208885193, "TIME_S_1KI": 2.2351723441343663, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 630.6274053192137, "W": 23.169251179886952, "J_1KI": 65.58787366814495, "W_1KI": 2.409698510648669, "W_D": 4.920251179886954, "J_D": 133.920825105667, "W_D_1KI": 0.511726591771914, "J_D_1KI": 0.05322169441205554} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_050.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_050.output new file mode 100644 index 0000000..a04680d --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_050.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_050.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 2.1840126514434814} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.5846, 0.6889, 0.6290, ..., 0.6852, 0.2836, 0.7780]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 2.1840126514434814 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9615 -m matrices/as-caida_pruned/as-caida_G_050.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 21.49118208885193} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.8292, 0.2852, 0.2282, ..., 0.9402, 0.7149, 0.5530]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 21.49118208885193 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.8292, 0.2852, 0.2282, ..., 0.9402, 0.7149, 0.5530]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 21.49118208885193 seconds + +[20.2, 20.2, 20.04, 19.92, 19.88, 19.88, 20.2, 20.32, 20.44, 20.48] +[20.36, 20.28, 20.52, 25.32, 27.12, 28.16, 29.12, 26.64, 25.76, 25.36, 25.32, 25.52, 25.52, 25.4, 25.48, 25.0, 24.84, 24.64, 24.52, 24.44, 24.6, 24.56, 24.76, 24.92, 24.92, 24.68] +27.218290328979492 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9615, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_050', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 90392, 'MATRIX_DENSITY': 9.180192695100532e-05, 'TIME_S': 21.49118208885193, 'TIME_S_1KI': 2.2351723441343663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 630.6274053192137, 'W': 23.169251179886952} +[20.2, 20.2, 20.04, 19.92, 19.88, 19.88, 20.2, 20.32, 20.44, 20.48, 20.32, 20.24, 20.32, 20.64, 20.68, 20.52, 20.52, 20.4, 20.24, 20.08] +364.97999999999996 +18.249 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9615, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_050', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 90392, 'MATRIX_DENSITY': 9.180192695100532e-05, 'TIME_S': 21.49118208885193, 'TIME_S_1KI': 2.2351723441343663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 630.6274053192137, 'W': 23.169251179886952, 'J_1KI': 65.58787366814495, 'W_1KI': 2.409698510648669, 'W_D': 4.920251179886954, 'J_D': 133.920825105667, 'W_D_1KI': 0.511726591771914, 'J_D_1KI': 0.05322169441205554} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_055.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_055.json new file mode 100644 index 0000000..1a05f46 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_055.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9226, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 
9.290283509348351e-05, "TIME_S": 20.403888463974, "TIME_S_1KI": 2.211563891607847, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 558.9223634243011, "W": 22.333919636636303, "J_1KI": 60.58122300285076, "W_1KI": 2.420758685956677, "W_D": 4.057919636636303, "J_D": 101.55235045146938, "W_D_1KI": 0.4398352088268267, "J_D_1KI": 0.047673445569783944} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_055.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_055.output new file mode 100644 index 0000000..ba5d108 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_055.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_055.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 2.275966167449951} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.0642, 0.4867, 0.9075, ..., 0.7051, 0.9255, 0.2623]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 2.275966167449951 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9226 -m matrices/as-caida_pruned/as-caida_G_055.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 20.403888463974} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.9580, 0.2631, 0.3577, ..., 0.1983, 0.0447, 0.1835]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 20.403888463974 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.9580, 0.2631, 0.3577, ..., 0.1983, 0.0447, 0.1835]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 20.403888463974 seconds + +[20.28, 20.2, 20.24, 20.2, 20.24, 20.28, 20.48, 20.16, 20.04, 20.08] +[20.16, 20.08, 20.8, 23.28, 23.28, 24.84, 26.2, 27.0, 25.88, 25.72, 24.8, 24.48, 24.04, 23.88, 23.88, 23.96, 24.12, 24.72, 25.2, 25.28, 25.28, 25.04, 24.36, 24.2] +25.02571749687195 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9226, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_055', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 91476, 'MATRIX_DENSITY': 9.290283509348351e-05, 'TIME_S': 20.403888463974, 'TIME_S_1KI': 2.211563891607847, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.9223634243011, 'W': 22.333919636636303} +[20.28, 20.2, 20.24, 20.2, 20.24, 20.28, 20.48, 20.16, 20.04, 20.08, 20.4, 20.4, 20.4, 20.32, 20.36, 20.52, 20.56, 20.36, 20.28, 20.2] +365.52 +18.276 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9226, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_055', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 91476, 'MATRIX_DENSITY': 9.290283509348351e-05, 'TIME_S': 20.403888463974, 'TIME_S_1KI': 2.211563891607847, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.9223634243011, 'W': 22.333919636636303, 'J_1KI': 60.58122300285076, 'W_1KI': 2.420758685956677, 'W_D': 4.057919636636303, 'J_D': 101.55235045146938, 'W_D_1KI': 0.4398352088268267, 'J_D_1KI': 0.047673445569783944} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_060.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_060.json new file mode 100644 index 0000000..7ced902 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_060.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9190, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 22.11389923095703, 
"TIME_S_1KI": 2.4063002427591984, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 598.2260414505005, "W": 23.945159224491736, "J_1KI": 65.09532551147993, "W_1KI": 2.6055668361797317, "W_D": 5.778159224491734, "J_D": 144.3567481565475, "W_D_1KI": 0.6287442028826696, "J_D_1KI": 0.0684161265378313} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_060.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_060.output new file mode 100644 index 0000000..2fa1720 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_060.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_060.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 2.2849113941192627} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.4093, 0.2306, 0.7311, ..., 0.8999, 0.5614, 0.4734]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 2.2849113941192627 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9190 -m matrices/as-caida_pruned/as-caida_G_060.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 22.11389923095703} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.0742, 0.7844, 0.8563, ..., 0.6817, 0.1643, 0.1471]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 22.11389923095703 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.0742, 0.7844, 0.8563, ..., 0.6817, 0.1643, 0.1471]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 22.11389923095703 seconds + +[19.64, 19.64, 19.44, 19.64, 19.72, 20.08, 20.72, 20.48, 20.6, 20.52] +[20.44, 20.08, 23.84, 25.08, 25.08, 27.28, 28.2, 29.0, 25.68, 24.56, 24.44, 24.8, 25.08, 24.96, 25.16, 24.88, 24.48, 24.76, 24.88, 24.88, 25.04, 24.76, 24.6, 24.6] +24.98317241668701 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9190, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_060', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 94180, 'MATRIX_DENSITY': 9.564901186217454e-05, 'TIME_S': 22.11389923095703, 'TIME_S_1KI': 2.4063002427591984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 598.2260414505005, 'W': 23.945159224491736} +[19.64, 19.64, 19.44, 19.64, 19.72, 20.08, 20.72, 20.48, 20.6, 20.52, 20.52, 20.52, 20.36, 20.16, 20.2, 20.04, 20.24, 20.48, 20.44, 20.48] +363.34000000000003 +18.167 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9190, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_060', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 94180, 'MATRIX_DENSITY': 9.564901186217454e-05, 'TIME_S': 22.11389923095703, 'TIME_S_1KI': 2.4063002427591984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 598.2260414505005, 'W': 23.945159224491736, 'J_1KI': 65.09532551147993, 'W_1KI': 2.6055668361797317, 'W_D': 5.778159224491734, 'J_D': 144.3567481565475, 'W_D_1KI': 0.6287442028826696, 'J_D_1KI': 0.0684161265378313} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_065.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_065.json new file mode 100644 index 0000000..36792d7 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_065.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 9076, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 
21.2085063457489, "TIME_S_1KI": 2.3367679975483586, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 558.9401651763917, "W": 22.28813113100922, "J_1KI": 61.58441661264783, "W_1KI": 2.4557218081764236, "W_D": 3.981131131009221, "J_D": 99.8385229732992, "W_D_1KI": 0.4386438002434135, "J_D_1KI": 0.04833007935692084} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_065.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_065.output new file mode 100644 index 0000000..b086af1 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_065.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_065.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 2.313544511795044} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.6829, 0.7244, 0.5394, ..., 0.7222, 0.3310, 0.4980]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 2.313544511795044 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 9076 -m matrices/as-caida_pruned/as-caida_G_065.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 21.2085063457489} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.9701, 0.1991, 0.8018, ..., 0.2601, 0.1408, 0.4092]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 21.2085063457489 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.9701, 0.1991, 0.8018, ..., 0.2601, 0.1408, 0.4092]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 21.2085063457489 seconds + +[20.88, 20.56, 20.44, 20.24, 20.16, 20.12, 20.12, 20.2, 20.08, 20.0] +[20.04, 20.04, 20.8, 22.2, 22.2, 23.72, 24.72, 25.56, 25.36, 25.68, 24.8, 25.12, 25.16, 25.12, 25.08, 24.64, 24.84, 24.72, 24.96, 24.88, 25.16, 25.04, 24.88, 24.64] +25.0779287815094 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9076, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_065', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 95068, 'MATRIX_DENSITY': 9.655086281283934e-05, 'TIME_S': 21.2085063457489, 'TIME_S_1KI': 2.3367679975483586, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.9401651763917, 'W': 22.28813113100922} +[20.88, 20.56, 20.44, 20.24, 20.16, 20.12, 20.12, 20.2, 20.08, 20.0, 20.2, 20.12, 20.12, 20.16, 20.0, 20.56, 20.6, 20.92, 20.92, 20.56] +366.14 +18.307 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 9076, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_065', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 95068, 'MATRIX_DENSITY': 9.655086281283934e-05, 'TIME_S': 21.2085063457489, 'TIME_S_1KI': 2.3367679975483586, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.9401651763917, 'W': 22.28813113100922, 'J_1KI': 61.58441661264783, 'W_1KI': 2.4557218081764236, 'W_D': 3.981131131009221, 'J_D': 99.8385229732992, 'W_D_1KI': 0.4386438002434135, 'J_D_1KI': 0.04833007935692084} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_070.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_070.json new file mode 100644 index 0000000..9101a31 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_070.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10809, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 21.410206079483032, 
"TIME_S_1KI": 1.9807758423057664, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 558.7609119701385, "W": 22.242561405084228, "J_1KI": 51.69404310945865, "W_1KI": 2.0577816083896967, "W_D": 3.862561405084225, "J_D": 97.03236484050737, "W_D_1KI": 0.3573467855568716, "J_D_1KI": 0.033060115233312204} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_070.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_070.output new file mode 100644 index 0000000..43316d1 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_070.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_070.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 2.0396475791931152} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.6537, 0.6274, 0.7145, ..., 0.4928, 0.2502, 0.7369]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 2.0396475791931152 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 10295 -m matrices/as-caida_pruned/as-caida_G_070.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 19.999547004699707} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.0183, 0.3015, 0.8882, ..., 0.3770, 0.5818, 0.6311]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 19.999547004699707 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 10809 -m matrices/as-caida_pruned/as-caida_G_070.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 21.410206079483032} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.8003, 0.6599, 0.2526, ..., 0.8562, 0.7490, 0.1496]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 21.410206079483032 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.8003, 0.6599, 0.2526, ..., 0.8562, 0.7490, 0.1496]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 21.410206079483032 seconds + +[20.52, 20.36, 20.12, 20.24, 20.32, 20.56, 20.68, 20.52, 20.52, 20.44] +[20.36, 20.28, 20.52, 21.48, 23.52, 24.52, 25.28, 25.6, 25.68, 24.84, 24.76, 25.04, 24.96, 24.68, 24.68, 24.56, 24.76, 24.6, 24.6, 24.52, 24.76, 24.56, 24.8, 25.08] +25.121248483657837 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10809, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_070', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 78684, 'MATRIX_DENSITY': 7.991130653390679e-05, 'TIME_S': 21.410206079483032, 'TIME_S_1KI': 1.9807758423057664, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.7609119701385, 'W': 22.242561405084228} +[20.52, 20.36, 20.12, 20.24, 20.32, 20.56, 20.68, 20.52, 20.52, 20.44, 20.04, 19.8, 19.96, 20.16, 20.32, 20.48, 20.92, 20.96, 20.84, 20.68] +367.6 +18.380000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10809, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_070', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 78684, 'MATRIX_DENSITY': 7.991130653390679e-05, 'TIME_S': 21.410206079483032, 'TIME_S_1KI': 1.9807758423057664, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 558.7609119701385, 'W': 22.242561405084228, 'J_1KI': 51.69404310945865, 'W_1KI': 2.0577816083896967, 'W_D': 3.862561405084225, 'J_D': 97.03236484050737, 'W_D_1KI': 0.3573467855568716, 'J_D_1KI': 0.033060115233312204} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_075.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_075.json new file mode 100644 index 0000000..08b5075 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_075.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8590, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 21.520405054092407, "TIME_S_1KI": 2.5052858037360193, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 557.3124726676941, "W": 23.227243177062086, "J_1KI": 64.87921684140794, "W_1KI": 2.7039864001236418, "W_D": 4.781243177062088, "J_D": 114.72073707246791, "W_D_1KI": 0.556605724919917, "J_D_1KI": 0.06479694120138732} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_075.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_075.output new file mode 100644 index 0000000..7940b83 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_075.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_075.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 2.4445319175720215} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.2885, 0.7608, 0.9904, ..., 0.0417, 0.9009, 0.3121]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 2.4445319175720215 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8590 -m matrices/as-caida_pruned/as-caida_G_075.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 21.520405054092407} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.2855, 0.9833, 0.7956, ..., 0.4046, 0.0085, 0.0752]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 21.520405054092407 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.2855, 0.9833, 0.7956, ..., 0.4046, 0.0085, 0.0752]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 21.520405054092407 seconds + +[20.12, 20.04, 20.32, 20.32, 20.52, 20.56, 20.76, 20.36, 20.16, 20.12] +[20.04, 20.08, 20.48, 21.64, 23.44, 24.24, 25.32, 25.64, 25.64, 25.56, 24.48, 24.52, 24.4, 24.68, 24.56, 24.52, 25.08, 25.44, 25.72, 25.56, 25.32, 24.76, 24.24] +23.993913888931274 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8590, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_075', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 97492, 'MATRIX_DENSITY': 9.901267216465406e-05, 'TIME_S': 21.520405054092407, 'TIME_S_1KI': 2.5052858037360193, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 557.3124726676941, 'W': 23.227243177062086} +[20.12, 20.04, 20.32, 20.32, 20.52, 20.56, 20.76, 20.36, 20.16, 20.12, 20.4, 20.6, 20.76, 21.0, 20.88, 20.88, 20.56, 20.48, 20.28, 20.24] +368.91999999999996 +18.445999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8590, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_075', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 97492, 'MATRIX_DENSITY': 9.901267216465406e-05, 'TIME_S': 21.520405054092407, 'TIME_S_1KI': 2.5052858037360193, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 557.3124726676941, 'W': 23.227243177062086, 'J_1KI': 64.87921684140794, 'W_1KI': 2.7039864001236418, 'W_D': 4.781243177062088, 'J_D': 114.72073707246791, 'W_D_1KI': 0.556605724919917, 'J_D_1KI': 0.06479694120138732} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_080.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_080.json new file mode 100644 index 0000000..5391a13 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_080.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8701, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 20.515164136886597, "TIME_S_1KI": 2.35779383253495, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 563.6000692367552, "W": 23.504286243308524, "J_1KI": 64.77417184654122, "W_1KI": 2.7013315990470663, "W_D": 5.150286243308525, "J_D": 123.49669559288012, "W_D_1KI": 0.5919188878644437, "J_D_1KI": 0.06802883437127269} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_080.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_080.output new file mode 100644 index 0000000..b82f527 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_080.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_080.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 2.413437604904175} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.1789, 0.0870, 0.6029, ..., 0.0215, 0.5319, 0.9087]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 2.413437604904175 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8701 -m matrices/as-caida_pruned/as-caida_G_080.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 20.515164136886597} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.5721, 0.2403, 0.5330, ..., 0.6070, 0.5052, 0.6222]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 20.515164136886597 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.5721, 0.2403, 0.5330, ..., 0.6070, 0.5052, 0.6222]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 20.515164136886597 seconds + +[19.92, 20.04, 20.28, 20.36, 20.24, 20.12, 20.04, 20.08, 20.08, 20.2] +[20.24, 20.36, 20.56, 22.32, 23.32, 25.24, 26.08, 26.16, 25.6, 24.68, 24.88, 25.0, 25.48, 25.0, 25.24, 25.04, 25.04, 25.12, 25.16, 25.52, 25.28, 25.08, 25.24] +23.97860813140869 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8701, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_080', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 98112, 'MATRIX_DENSITY': 9.964234287345156e-05, 'TIME_S': 20.515164136886597, 'TIME_S_1KI': 2.35779383253495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 563.6000692367552, 'W': 23.504286243308524} +[19.92, 20.04, 20.28, 20.36, 20.24, 20.12, 20.04, 20.08, 20.08, 20.2, 20.28, 20.4, 20.72, 20.52, 20.68, 21.0, 20.8, 20.68, 20.56, 20.56] +367.08 +18.354 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8701, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_080', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 98112, 'MATRIX_DENSITY': 9.964234287345156e-05, 'TIME_S': 20.515164136886597, 'TIME_S_1KI': 2.35779383253495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 563.6000692367552, 'W': 23.504286243308524, 'J_1KI': 64.77417184654122, 'W_1KI': 2.7013315990470663, 'W_D': 5.150286243308525, 'J_D': 123.49669559288012, 'W_D_1KI': 0.5919188878644437, 'J_D_1KI': 0.06802883437127269} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_085.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_085.json new file mode 100644 index 0000000..abf8017 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_085.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8645, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 20.621427536010742, "TIME_S_1KI": 2.3853588821296405, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 582.1496765518187, "W": 24.26807387327404, "J_1KI": 67.33946518818031, "W_1KI": 2.8071803207951462, "W_D": 5.865073873274042, "J_D": 140.69311293959612, "W_D_1KI": 0.6784353815238915, "J_D_1KI": 0.07847719855684113} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_085.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_085.output new file mode 100644 index 0000000..6f31ebb --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_085.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_085.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 2.4288856983184814} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.2083, 0.0876, 0.7345, ..., 0.8106, 0.1476, 0.8835]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 2.4288856983184814 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8645 -m matrices/as-caida_pruned/as-caida_G_085.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 20.621427536010742} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.5655, 0.8631, 0.8601, ..., 0.4906, 0.7979, 0.8783]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 20.621427536010742 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.5655, 0.8631, 0.8601, ..., 0.4906, 0.7979, 0.8783]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 20.621427536010742 seconds + +[20.24, 20.24, 20.28, 20.12, 20.36, 20.32, 20.52, 20.52, 20.64, 20.8] +[20.8, 20.64, 20.52, 24.64, 26.4, 28.28, 29.64, 29.68, 26.72, 25.72, 25.44, 25.24, 25.24, 25.12, 25.32, 25.52, 25.48, 25.28, 25.12, 25.12, 25.04, 24.68, 24.8] +23.988293409347534 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8645, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_085', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 99166, 'MATRIX_DENSITY': 0.0001007127830784073, 'TIME_S': 20.621427536010742, 'TIME_S_1KI': 2.3853588821296405, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.1496765518187, 'W': 24.26807387327404} +[20.24, 20.24, 20.28, 20.12, 20.36, 20.32, 20.52, 20.52, 20.64, 20.8, 20.36, 20.32, 20.28, 20.64, 20.56, 20.52, 20.56, 20.56, 20.52, 20.8] +368.06 +18.403 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8645, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_085', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 99166, 'MATRIX_DENSITY': 0.0001007127830784073, 'TIME_S': 20.621427536010742, 'TIME_S_1KI': 2.3853588821296405, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.1496765518187, 'W': 24.26807387327404, 'J_1KI': 67.33946518818031, 'W_1KI': 2.8071803207951462, 'W_D': 5.865073873274042, 'J_D': 140.69311293959612, 'W_D_1KI': 0.6784353815238915, 'J_D_1KI': 0.07847719855684113} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_090.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_090.json new file mode 100644 index 0000000..c508881 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_090.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8274, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 21.14634871482849, "TIME_S_1KI": 2.555758848782752, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 556.0772953796386, "W": 23.21960668645745, "J_1KI": 67.20779494556908, "W_1KI": 2.80633389974105, "W_D": 4.717606686457451, "J_D": 112.98012073564523, "W_D_1KI": 0.5701724300770427, "J_D_1KI": 0.0689113403525553} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_090.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_090.output new file mode 100644 index 0000000..ecc02cc --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_090.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_090.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 2.537921190261841} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.0065, 0.2873, 0.7515, ..., 0.9862, 0.5438, 0.1172]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 2.537921190261841 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8274 -m matrices/as-caida_pruned/as-caida_G_090.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 21.14634871482849} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.4904, 0.7822, 0.2251, ..., 0.3343, 0.1126, 0.6827]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 21.14634871482849 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.4904, 0.7822, 0.2251, ..., 0.3343, 0.1126, 0.6827]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 21.14634871482849 seconds + +[20.92, 20.96, 20.96, 20.68, 20.88, 20.88, 20.76, 20.8, 20.68, 20.68] +[20.24, 20.36, 20.56, 21.4, 23.6, 24.48, 25.68, 25.68, 25.96, 24.92, 25.08, 25.08, 25.04, 25.08, 24.84, 24.72, 24.52, 24.36, 24.52, 24.48, 24.6, 24.84, 24.96] +23.948609590530396 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8274, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_090', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 100924, 'MATRIX_DENSITY': 0.00010249820421722343, 'TIME_S': 21.14634871482849, 'TIME_S_1KI': 2.555758848782752, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 556.0772953796386, 'W': 23.21960668645745} +[20.92, 20.96, 20.96, 20.68, 20.88, 20.88, 20.76, 20.8, 20.68, 20.68, 20.36, 20.44, 20.36, 20.16, 20.24, 20.12, 20.12, 20.2, 20.48, 20.68] +370.03999999999996 +18.502 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8274, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_090', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 100924, 'MATRIX_DENSITY': 0.00010249820421722343, 'TIME_S': 21.14634871482849, 'TIME_S_1KI': 2.555758848782752, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 556.0772953796386, 'W': 23.21960668645745, 'J_1KI': 67.20779494556908, 'W_1KI': 2.80633389974105, 'W_D': 4.717606686457451, 'J_D': 112.98012073564523, 'W_D_1KI': 0.5701724300770427, 'J_D_1KI': 0.0689113403525553} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_095.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_095.json new file mode 100644 index 0000000..7ad31de --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_095.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8403, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 20.63450598716736, "TIME_S_1KI": 2.4556118037804784, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 586.1911779022217, "W": 23.45694107354967, "J_1KI": 69.7597498396075, "W_1KI": 2.7914960220813603, "W_D": 4.934941073549673, "J_D": 123.32464457798017, "W_D_1KI": 0.5872832409317711, "J_D_1KI": 0.06988971092845069} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_095.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_095.output new file mode 100644 index 0000000..b058024 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_095.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_095.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 2.4990792274475098} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.5889, 0.4566, 0.0264, ..., 0.7058, 0.2881, 0.7420]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 2.4990792274475098 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8403 -m matrices/as-caida_pruned/as-caida_G_095.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 20.63450598716736} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.6029, 0.1139, 0.0344, ..., 0.4128, 0.8841, 0.8807]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 20.63450598716736 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.6029, 0.1139, 0.0344, ..., 0.4128, 0.8841, 0.8807]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 20.63450598716736 seconds + +[20.32, 20.32, 20.68, 20.92, 20.88, 20.76, 20.64, 20.64, 20.44, 20.2] +[19.92, 20.12, 20.32, 21.84, 22.64, 24.56, 25.6, 25.92, 25.84, 24.8, 24.8, 25.16, 25.2, 25.72, 25.72, 25.36, 25.28, 25.32, 25.08, 24.8, 25.04, 24.88, 24.8, 25.12] +24.990094661712646 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8403, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_095', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102290, 'MATRIX_DENSITY': 0.00010388551097241275, 'TIME_S': 20.63450598716736, 'TIME_S_1KI': 2.4556118037804784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 586.1911779022217, 'W': 23.45694107354967} +[20.32, 20.32, 20.68, 20.92, 20.88, 20.76, 20.64, 20.64, 20.44, 20.2, 20.36, 20.4, 20.08, 20.36, 20.6, 20.8, 20.8, 20.8, 20.68, 20.4] +370.44 +18.522 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8403, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_095', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102290, 'MATRIX_DENSITY': 0.00010388551097241275, 'TIME_S': 20.63450598716736, 'TIME_S_1KI': 2.4556118037804784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 586.1911779022217, 'W': 23.45694107354967, 'J_1KI': 69.7597498396075, 'W_1KI': 2.7914960220813603, 'W_D': 4.934941073549673, 'J_D': 123.32464457798017, 'W_D_1KI': 0.5872832409317711, 'J_D_1KI': 0.06988971092845069} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_100.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_100.json new file mode 100644 index 0000000..7948c06 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_100.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8516, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 21.861371517181396, "TIME_S_1KI": 2.5670938841218174, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 576.373895225525, "W": 22.98149181435375, "J_1KI": 67.68129347411049, "W_1KI": 2.698625154339332, "W_D": 4.686491814353751, "J_D": 117.53682327508935, "W_D_1KI": 0.5503160890504639, "J_D_1KI": 0.0646214289631827} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_100.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_100.output new file mode 100644 index 0000000..72a3c4a --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_100.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_100.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 2.465850830078125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.8360, 0.2746, 0.7361, ..., 0.4088, 0.7297, 0.3024]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 2.465850830078125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8516 -m matrices/as-caida_pruned/as-caida_G_100.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 21.861371517181396} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.5964, 0.9344, 0.5005, ..., 0.9847, 0.5834, 0.0254]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 21.861371517181396 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.5964, 0.9344, 0.5005, ..., 0.9847, 0.5834, 0.0254]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 21.861371517181396 seconds + +[20.72, 20.56, 20.28, 20.36, 20.08, 20.2, 20.36, 20.32, 20.24, 20.12] +[20.24, 20.24, 20.16, 23.56, 25.16, 27.64, 29.0, 29.92, 26.44, 25.12, 24.76, 24.52, 24.48, 24.44, 24.68, 25.28, 24.92, 24.92, 25.36, 25.24, 25.28, 25.28, 25.24, 25.2] +25.07991647720337 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8516, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_100', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102888, 'MATRIX_DENSITY': 0.00010449283852702711, 'TIME_S': 21.861371517181396, 'TIME_S_1KI': 2.5670938841218174, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 576.373895225525, 'W': 22.98149181435375} +[20.72, 20.56, 20.28, 20.36, 20.08, 20.2, 20.36, 20.32, 20.24, 20.12, 20.52, 20.32, 20.2, 20.2, 20.28, 20.32, 20.44, 20.32, 20.56, 20.36] +365.9 +18.294999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8516, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_100', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102888, 'MATRIX_DENSITY': 0.00010449283852702711, 'TIME_S': 21.861371517181396, 'TIME_S_1KI': 2.5670938841218174, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 576.373895225525, 'W': 22.98149181435375, 'J_1KI': 67.68129347411049, 'W_1KI': 2.698625154339332, 'W_D': 4.686491814353751, 'J_D': 117.53682327508935, 'W_D_1KI': 0.5503160890504639, 'J_D_1KI': 0.0646214289631827} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_105.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_105.json new file mode 100644 index 0000000..8cd1bdb --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_105.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8324, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 21.21791172027588, "TIME_S_1KI": 2.5490042912392936, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 581.6480506896974, "W": 23.135149620040956, "J_1KI": 69.87602723326495, "W_1KI": 2.77933080490641, "W_D": 4.768149620040955, "J_D": 119.87754466438297, "W_D_1KI": 0.5728195122586442, "J_D_1KI": 0.06881541473554112} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_105.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_105.output new file mode 100644 index 0000000..e2b2b81 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_105.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_105.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 2.522615909576416} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.1482, 0.5158, 0.4126, ..., 0.7604, 0.2487, 0.5677]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 2.522615909576416 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8324 -m matrices/as-caida_pruned/as-caida_G_105.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 21.21791172027588} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.3309, 0.8749, 0.3247, ..., 0.3544, 0.6828, 0.1442]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 21.21791172027588 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.3309, 0.8749, 0.3247, ..., 0.3544, 0.6828, 0.1442]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 21.21791172027588 seconds + +[20.64, 20.68, 20.68, 20.4, 20.4, 20.48, 20.44, 20.52, 20.64, 20.72] +[20.8, 20.76, 23.88, 25.0, 26.48, 27.64, 28.72, 28.72, 26.12, 25.88, 25.16, 25.28, 25.08, 25.24, 25.12, 25.0, 24.88, 24.68, 24.6, 24.52, 24.24, 24.28, 24.32, 24.4] +25.141313552856445 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8324, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_105', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104726, 'MATRIX_DENSITY': 0.00010635950749923647, 'TIME_S': 21.21791172027588, 'TIME_S_1KI': 2.5490042912392936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 581.6480506896974, 'W': 23.135149620040956} +[20.64, 20.68, 20.68, 20.4, 20.4, 20.48, 20.44, 20.52, 20.64, 20.72, 20.32, 20.24, 20.12, 20.32, 20.32, 20.2, 20.4, 20.24, 20.28, 20.28] +367.34000000000003 +18.367 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8324, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_105', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104726, 'MATRIX_DENSITY': 0.00010635950749923647, 'TIME_S': 21.21791172027588, 'TIME_S_1KI': 2.5490042912392936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 581.6480506896974, 'W': 23.135149620040956, 'J_1KI': 69.87602723326495, 'W_1KI': 2.77933080490641, 'W_D': 4.768149620040955, 'J_D': 119.87754466438297, 'W_D_1KI': 0.5728195122586442, 'J_D_1KI': 0.06881541473554112} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_110.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_110.json new file mode 100644 index 0000000..b130778 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_110.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8159, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 22.441336631774902, "TIME_S_1KI": 2.7505008740991426, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 551.4319940948485, "W": 23.022171231399504, "J_1KI": 67.58573282201844, "W_1KI": 2.821690309032909, "W_D": 4.828171231399505, "J_D": 115.64539518022528, "W_D_1KI": 0.5917601705355442, "J_D_1KI": 0.07252851704075795} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_110.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_110.output new file mode 100644 index 0000000..b9d8372 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_110.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_110.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 2.573533296585083} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.5332, 0.4400, 0.2870, ..., 0.4684, 0.6131, 0.1118]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 2.573533296585083 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8159 -m matrices/as-caida_pruned/as-caida_G_110.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 22.441336631774902} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.4307, 0.3342, 0.5692, ..., 0.6489, 0.9558, 0.4744]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 22.441336631774902 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.4307, 0.3342, 0.5692, ..., 0.6489, 0.9558, 0.4744]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 22.441336631774902 seconds + +[20.16, 20.08, 19.88, 19.88, 20.08, 20.12, 20.36, 20.6, 20.32, 20.16] +[20.24, 20.12, 21.12, 21.12, 22.08, 23.72, 24.6, 25.12, 25.0, 24.4, 24.52, 24.24, 24.56, 24.6, 24.56, 24.72, 24.96, 24.8, 25.12, 25.24, 25.0, 25.0, 25.28] +23.95221495628357 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8159, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_110', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104846, 'MATRIX_DENSITY': 0.0001064813792493263, 'TIME_S': 22.441336631774902, 'TIME_S_1KI': 2.7505008740991426, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 551.4319940948485, 'W': 23.022171231399504} +[20.16, 20.08, 19.88, 19.88, 20.08, 20.12, 20.36, 20.6, 20.32, 20.16, 19.92, 19.92, 20.24, 20.24, 20.36, 20.36, 20.44, 20.32, 20.28, 20.56] +363.88 +18.194 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8159, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_110', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104846, 'MATRIX_DENSITY': 0.0001064813792493263, 'TIME_S': 22.441336631774902, 'TIME_S_1KI': 2.7505008740991426, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 551.4319940948485, 'W': 23.022171231399504, 'J_1KI': 67.58573282201844, 'W_1KI': 2.821690309032909, 'W_D': 4.828171231399505, 'J_D': 115.64539518022528, 'W_D_1KI': 0.5917601705355442, 'J_D_1KI': 0.07252851704075795} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_115.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_115.json new file mode 100644 index 0000000..8d4c81a --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_115.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8211, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 21.333778381347656, "TIME_S_1KI": 2.5981949069964263, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 567.8365756225585, "W": 22.694927255772477, "J_1KI": 69.155593182628, "W_1KI": 2.763966295916755, "W_D": 4.3229272557724805, "J_D": 108.16144867610933, "W_D_1KI": 0.5264799970493825, "J_D_1KI": 0.06411886457792992} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_115.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_115.output new file mode 100644 index 0000000..a629dc6 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_115.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_115.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 2.5573296546936035} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.9699, 0.6985, 0.1908, ..., 0.1751, 0.6358, 0.0840]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 2.5573296546936035 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8211 -m matrices/as-caida_pruned/as-caida_G_115.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 21.333778381347656} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.6810, 0.6294, 0.8611, ..., 0.9171, 0.2548, 0.6035]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 21.333778381347656 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.6810, 0.6294, 0.8611, ..., 0.9171, 0.2548, 0.6035]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 21.333778381347656 seconds + +[20.28, 20.4, 20.2, 20.12, 20.08, 20.24, 20.24, 20.44, 20.64, 20.88] +[20.6, 20.48, 20.88, 21.96, 24.08, 25.0, 26.28, 26.24, 26.08, 25.2, 25.6, 25.64, 25.64, 25.84, 25.84, 25.96, 25.8, 25.44, 25.04, 24.84, 24.44, 24.28, 24.32, 24.32] +25.020418405532837 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8211, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_115', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106312, 'MATRIX_DENSITY': 0.00010797024579625715, 'TIME_S': 21.333778381347656, 'TIME_S_1KI': 2.5981949069964263, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 567.8365756225585, 'W': 22.694927255772477} +[20.28, 20.4, 20.2, 20.12, 20.08, 20.24, 20.24, 20.44, 20.64, 20.88, 20.48, 20.36, 20.72, 20.6, 20.6, 20.48, 20.48, 20.6, 20.24, 20.36] +367.43999999999994 +18.371999999999996 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8211, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_115', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106312, 'MATRIX_DENSITY': 0.00010797024579625715, 'TIME_S': 21.333778381347656, 'TIME_S_1KI': 2.5981949069964263, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 567.8365756225585, 'W': 22.694927255772477, 'J_1KI': 69.155593182628, 'W_1KI': 2.763966295916755, 'W_D': 4.3229272557724805, 'J_D': 108.16144867610933, 'W_D_1KI': 0.5264799970493825, 'J_D_1KI': 0.06411886457792992} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_120.json b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_120.json new file mode 100644 index 0000000..0d20a55 --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_120.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 8081, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, "TIME_S": 21.84908366203308, "TIME_S_1KI": 2.7037598888792327, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 589.5661828041078, "W": 23.61014815774482, "J_1KI": 72.95708239130155, "W_1KI": 2.9216864444678654, "W_D": 5.32814815774482, "J_D": 133.04854970788975, "W_D_1KI": 0.6593426751323871, "J_D_1KI": 0.0815917182443246} diff --git a/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_120.output b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_120.output new file mode 100644 index 0000000..8bb395b --- /dev/null +++ b/pytorch/output_as-caida/altra_1_csr_20_10_10_as-caida_G_120.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/as-caida_pruned/as-caida_G_120.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": 
"as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, "TIME_S": 2.5984902381896973} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.6539, 0.1069, 0.6741, ..., 0.8905, 0.5463, 0.5314]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 2.5984902381896973 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 8081 -m matrices/as-caida_pruned/as-caida_G_120.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, "TIME_S": 21.84908366203308} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.8419, 0.2337, 0.0233, ..., 0.5994, 0.4430, 0.6210]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 21.84908366203308 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.8419, 0.2337, 0.0233, ..., 0.5994, 0.4430, 0.6210]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 21.84908366203308 seconds + +[20.48, 20.56, 20.32, 20.32, 20.24, 20.08, 20.24, 20.12, 20.24, 20.32] +[20.44, 20.4, 21.12, 22.16, 24.08, 25.04, 25.72, 25.68, 25.68, 25.96, 25.36, 25.8, 25.68, 25.32, 25.08, 25.08, 24.6, 24.6, 24.6, 24.88, 24.8, 25.2, 25.44, 25.2] +24.970880270004272 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8081, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_120', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106510, 'MATRIX_DENSITY': 0.0001081713341839054, 'TIME_S': 21.84908366203308, 'TIME_S_1KI': 2.7037598888792327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 589.5661828041078, 'W': 23.61014815774482} +[20.48, 20.56, 20.32, 20.32, 20.24, 20.08, 20.24, 20.12, 20.24, 20.32, 20.28, 20.32, 20.4, 20.32, 20.32, 20.4, 20.4, 20.32, 20.36, 20.28] +365.64 +18.282 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 8081, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_120', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106510, 'MATRIX_DENSITY': 0.0001081713341839054, 'TIME_S': 21.84908366203308, 'TIME_S_1KI': 2.7037598888792327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 589.5661828041078, 'W': 23.61014815774482, 'J_1KI': 72.95708239130155, 'W_1KI': 2.9216864444678654, 'W_D': 5.32814815774482, 'J_D': 133.04854970788975, 'W_D_1KI': 0.6593426751323871, 'J_D_1KI': 0.0815917182443246} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_005.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_005.json new file mode 100644 index 0000000..fa6460a --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_005.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 82702, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 20.07656455039978, "TIME_S_1KI": 0.24275790851974294, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1471.9716919898988, "W": 64.89, "J_1KI": 17.79850175316073, "W_1KI": 0.7846243138013591, "W_D": 29.417, "J_D": 667.2983705234528, "W_D_1KI": 0.3556987739111509, "J_D_1KI": 0.004300969431345686} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_005.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_005.output new file mode 100644 index 0000000..1e64bcd --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_005.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_005.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 0.2539207935333252} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.5534, 0.0758, 0.8783, ..., 0.8007, 0.8544, 0.9598]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 0.2539207935333252 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '82702', '-m', 'matrices/as-caida_pruned/as-caida_G_005.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 20.07656455039978} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.7028, 0.2266, 0.3261, ..., 0.0864, 0.4634, 0.8737]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 20.07656455039978 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.7028, 0.2266, 0.3261, ..., 0.0864, 0.4634, 0.8737]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 20.07656455039978 seconds + +[40.28, 38.91, 38.7, 38.44, 38.74, 38.39, 38.61, 38.45, 42.09, 49.32] +[64.89] +22.684106826782227 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 82702, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_005', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 70026, 'MATRIX_DENSITY': 7.111825976492498e-05, 'TIME_S': 20.07656455039978, 'TIME_S_1KI': 0.24275790851974294, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1471.9716919898988, 'W': 64.89} +[40.28, 38.91, 38.7, 38.44, 38.74, 38.39, 38.61, 38.45, 42.09, 49.32, 39.09, 38.56, 38.48, 38.49, 38.69, 38.42, 38.55, 38.46, 42.16, 41.95] +709.46 +35.473 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 82702, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_005', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 70026, 'MATRIX_DENSITY': 7.111825976492498e-05, 'TIME_S': 20.07656455039978, 'TIME_S_1KI': 0.24275790851974294, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1471.9716919898988, 'W': 64.89, 'J_1KI': 17.79850175316073, 'W_1KI': 0.7846243138013591, 'W_D': 29.417, 'J_D': 667.2983705234528, 'W_D_1KI': 0.3556987739111509, 'J_D_1KI': 0.004300969431345686} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_010.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_010.json new file mode 100644 index 0000000..0f42be8 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_010.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 78992, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 20.234853267669678, "TIME_S_1KI": 0.2561633237248035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1487.3350584197046, "W": 64.93, "J_1KI": 18.82893278331609, "W_1KI": 0.821981972858011, "W_D": 30.04825000000001, "J_D": 688.307649301708, "W_D_1KI": 0.38039611606238616, "J_D_1KI": 0.004815628368219391} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_010.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_010.output new file mode 100644 index 0000000..3171f6e --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_010.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 
0.26584959030151367} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.1072, 0.1068, 0.3480, ..., 0.0585, 0.4984, 0.5877]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 0.26584959030151367 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '78992', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 20.234853267669678} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.1821, 0.5489, 0.4233, ..., 0.1498, 0.4922, 0.3408]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 20.234853267669678 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.1821, 0.5489, 0.4233, ..., 0.1498, 0.4922, 0.3408]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 20.234853267669678 seconds + +[40.13, 38.79, 38.57, 38.54, 38.57, 38.39, 38.46, 38.37, 38.87, 41.47] +[64.93] +22.90674662590027 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 78992, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 20.234853267669678, 'TIME_S_1KI': 0.2561633237248035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1487.3350584197046, 'W': 64.93} +[40.13, 38.79, 38.57, 38.54, 38.57, 38.39, 38.46, 38.37, 38.87, 41.47, 40.21, 38.44, 38.69, 38.43, 38.43, 39.23, 38.47, 38.49, 38.39, 39.2] +697.635 +34.88175 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 78992, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 20.234853267669678, 'TIME_S_1KI': 0.2561633237248035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1487.3350584197046, 'W': 64.93, 'J_1KI': 18.82893278331609, 'W_1KI': 0.821981972858011, 'W_D': 30.04825000000001, 'J_D': 688.307649301708, 'W_D_1KI': 0.38039611606238616, 'J_D_1KI': 0.004815628368219391} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_015.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_015.json new file mode 100644 index 0000000..1f064b4 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_015.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 74097, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 20.0875027179718, "TIME_S_1KI": 0.271097382052874, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1463.0922887516022, "W": 64.78, "J_1KI": 19.74563462423043, "W_1KI": 0.8742594167105281, "W_D": 29.661749999999998, "J_D": 669.9271024371385, "W_D_1KI": 0.4003097291388315, "J_D_1KI": 0.005402509266756164} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_015.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_015.output new file mode 100644 index 0000000..ccc97e6 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_015.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_015.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 
0.28341221809387207} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.6027, 0.3568, 0.0149, ..., 0.8074, 0.5322, 0.0088]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 0.28341221809387207 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '74097', '-m', 'matrices/as-caida_pruned/as-caida_G_015.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 20.0875027179718} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.5340, 0.2586, 0.4916, ..., 0.1842, 0.3538, 0.9722]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 20.0875027179718 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.5340, 0.2586, 0.4916, ..., 0.1842, 0.3538, 0.9722]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 20.0875027179718 seconds + +[39.85, 38.7, 38.49, 38.99, 38.97, 38.99, 38.69, 38.49, 43.67, 38.62] +[64.78] +22.58555555343628 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 74097, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_015', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 77124, 'MATRIX_DENSITY': 7.832697378273889e-05, 'TIME_S': 20.0875027179718, 'TIME_S_1KI': 0.271097382052874, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1463.0922887516022, 'W': 64.78} +[39.85, 38.7, 38.49, 38.99, 38.97, 38.99, 38.69, 38.49, 43.67, 38.62, 39.11, 38.45, 38.9, 38.38, 39.57, 38.37, 38.64, 38.36, 38.73, 38.37] +702.365 +35.11825 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 74097, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_015', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 77124, 'MATRIX_DENSITY': 7.832697378273889e-05, 'TIME_S': 20.0875027179718, 'TIME_S_1KI': 0.271097382052874, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1463.0922887516022, 'W': 64.78, 'J_1KI': 19.74563462423043, 'W_1KI': 0.8742594167105281, 'W_D': 29.661749999999998, 'J_D': 669.9271024371385, 'W_D_1KI': 0.4003097291388315, 'J_D_1KI': 0.005402509266756164} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_020.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_020.json new file mode 100644 index 0000000..a5a0104 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_020.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 72530, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 20.32188630104065, "TIME_S_1KI": 0.280185941004283, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1484.4361194562914, "W": 64.62, "J_1KI": 20.4665120564772, "W_1KI": 0.8909416793051153, "W_D": 29.4585, "J_D": 676.7140424791575, "W_D_1KI": 0.4061560733489591, "J_D_1KI": 0.0055998355625115} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_020.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_020.output new file mode 100644 index 0000000..baa08c3 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_020.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_020.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 0.2895321846008301} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.5141, 0.4431, 0.2140, ..., 0.7970, 0.3682, 0.6567]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 0.2895321846008301 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '72530', '-m', 'matrices/as-caida_pruned/as-caida_G_020.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 20.32188630104065} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.0355, 0.1618, 0.0920, ..., 0.1393, 0.2391, 0.3473]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 20.32188630104065 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.0355, 0.1618, 0.0920, ..., 0.1393, 0.2391, 0.3473]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 20.32188630104065 seconds + +[39.18, 38.31, 38.43, 45.53, 39.06, 38.82, 38.58, 38.27, 38.53, 38.3] +[64.62] +22.97177529335022 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 72530, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_020', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 80948, 'MATRIX_DENSITY': 8.221062021893506e-05, 'TIME_S': 20.32188630104065, 'TIME_S_1KI': 0.280185941004283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1484.4361194562914, 'W': 64.62} +[39.18, 38.31, 38.43, 45.53, 39.06, 38.82, 38.58, 38.27, 38.53, 38.3, 39.83, 39.04, 38.95, 38.81, 38.4, 38.84, 38.46, 38.41, 38.75, 38.77] +703.23 +35.161500000000004 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 72530, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_020', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 80948, 'MATRIX_DENSITY': 8.221062021893506e-05, 'TIME_S': 20.32188630104065, 'TIME_S_1KI': 0.280185941004283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1484.4361194562914, 'W': 64.62, 'J_1KI': 20.4665120564772, 'W_1KI': 0.8909416793051153, 'W_D': 29.4585, 'J_D': 676.7140424791575, 'W_D_1KI': 0.4061560733489591, 'J_D_1KI': 0.0055998355625115} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_025.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_025.json new file mode 100644 index 0000000..7a7aa8f --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_025.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 69039, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 20.171581506729126, "TIME_S_1KI": 0.29217661766145403, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1473.2901193213463, "W": 64.51, "J_1KI": 21.339968993197267, "W_1KI": 0.9343993974420256, "W_D": 29.6025, "J_D": 676.0668230849504, "W_D_1KI": 0.4287793855646808, "J_D_1KI": 0.006210683607304289} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_025.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_025.output new file mode 100644 index 0000000..ccfc4ad --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_025.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_025.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 
0.30417323112487793} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.6655, 0.9099, 0.6419, ..., 0.6262, 0.6776, 0.0277]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 0.30417323112487793 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '69039', '-m', 'matrices/as-caida_pruned/as-caida_G_025.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 20.171581506729126} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.3524, 0.1690, 0.3983, ..., 0.4779, 0.0867, 0.9985]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 20.171581506729126 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.3524, 0.1690, 0.3983, ..., 0.4779, 0.0867, 0.9985]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 20.171581506729126 seconds + +[40.12, 38.43, 39.93, 38.85, 38.48, 38.72, 38.45, 38.5, 38.77, 38.35] +[64.51] +22.83816647529602 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 69039, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_025', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 85850, 'MATRIX_DENSITY': 8.718908121010495e-05, 'TIME_S': 20.171581506729126, 'TIME_S_1KI': 0.29217661766145403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1473.2901193213463, 'W': 64.51} +[40.12, 38.43, 39.93, 38.85, 38.48, 38.72, 38.45, 38.5, 38.77, 38.35, 39.64, 38.37, 38.6, 38.37, 38.68, 38.56, 38.43, 39.92, 38.57, 38.93] +698.1500000000001 +34.907500000000006 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 69039, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_025', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 85850, 'MATRIX_DENSITY': 8.718908121010495e-05, 'TIME_S': 20.171581506729126, 'TIME_S_1KI': 0.29217661766145403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1473.2901193213463, 'W': 64.51, 'J_1KI': 21.339968993197267, 'W_1KI': 0.9343993974420256, 'W_D': 29.6025, 'J_D': 676.0668230849504, 'W_D_1KI': 0.4287793855646808, 'J_D_1KI': 0.006210683607304289} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_030.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_030.json new file mode 100644 index 0000000..5fc3171 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_030.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 67920, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 20.161845922470093, "TIME_S_1KI": 0.29684696587853493, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1467.5574081468583, "W": 64.61, "J_1KI": 21.607146763057397, "W_1KI": 0.951266195524146, "W_D": 29.384500000000003, "J_D": 667.4422018215657, "W_D_1KI": 0.43263398115429924, "J_D_1KI": 0.006369758261989093} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_030.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_030.output new file mode 100644 index 0000000..1b238df --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_030.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_030.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 
8.820467912752026e-05, "TIME_S": 0.30918335914611816} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.4553, 0.3912, 0.1533, ..., 0.2441, 0.0734, 0.2074]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 0.30918335914611816 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '67920', '-m', 'matrices/as-caida_pruned/as-caida_G_030.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 20.161845922470093} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.8140, 0.5205, 0.5473, ..., 0.3011, 0.6252, 0.6875]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 20.161845922470093 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.8140, 0.5205, 0.5473, ..., 0.3011, 0.6252, 0.6875]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 20.161845922470093 seconds + +[39.91, 38.55, 38.51, 38.41, 38.98, 38.82, 38.53, 38.84, 40.38, 38.45] +[64.61] +22.714090824127197 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 67920, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_030', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 86850, 'MATRIX_DENSITY': 8.820467912752026e-05, 'TIME_S': 20.161845922470093, 'TIME_S_1KI': 0.29684696587853493, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1467.5574081468583, 'W': 64.61} +[39.91, 38.55, 38.51, 38.41, 38.98, 38.82, 38.53, 38.84, 40.38, 38.45, 40.05, 38.73, 38.92, 38.39, 38.48, 44.19, 39.18, 38.53, 38.67, 38.39] +704.51 +35.2255 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 67920, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_030', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 86850, 'MATRIX_DENSITY': 8.820467912752026e-05, 'TIME_S': 20.161845922470093, 'TIME_S_1KI': 0.29684696587853493, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1467.5574081468583, 'W': 64.61, 'J_1KI': 21.607146763057397, 'W_1KI': 0.951266195524146, 'W_D': 29.384500000000003, 'J_D': 667.4422018215657, 'W_D_1KI': 0.43263398115429924, 'J_D_1KI': 0.006369758261989093} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_035.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_035.json new file mode 100644 index 0000000..62d328f --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_035.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 68117, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 20.348753690719604, "TIME_S_1KI": 0.2987323823820721, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1490.6987690734863, "W": 64.86, "J_1KI": 21.88438670337047, "W_1KI": 0.9521852107403438, "W_D": 30.015249999999995, "J_D": 689.8503889675139, "W_D_1KI": 0.4406425708707077, "J_D_1KI": 0.0064689074808154745} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_035.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_035.output new file mode 100644 index 0000000..331f1da --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_035.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_035.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, 
"TIME_S": 0.3082902431488037} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.7253, 0.6973, 0.1968, ..., 0.4575, 0.0429, 0.5459]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 0.3082902431488037 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '68117', '-m', 'matrices/as-caida_pruned/as-caida_G_035.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 20.348753690719604} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.1776, 0.1204, 0.8158, ..., 0.5533, 0.2447, 0.1152]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 20.348753690719604 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.1776, 0.1204, 0.8158, ..., 0.5533, 0.2447, 0.1152]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 20.348753690719604 seconds + +[39.56, 38.42, 39.02, 39.47, 38.83, 38.52, 38.45, 39.1, 38.74, 38.75] +[64.86] +22.98332977294922 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 68117, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_035', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 87560, 'MATRIX_DENSITY': 8.892575364888514e-05, 'TIME_S': 20.348753690719604, 'TIME_S_1KI': 0.2987323823820721, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1490.6987690734863, 'W': 64.86} +[39.56, 38.42, 39.02, 39.47, 38.83, 38.52, 38.45, 39.1, 38.74, 38.75, 39.11, 38.45, 38.87, 38.41, 38.88, 38.4, 38.43, 38.5, 38.53, 38.33] +696.8950000000001 +34.844750000000005 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 68117, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_035', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 87560, 'MATRIX_DENSITY': 8.892575364888514e-05, 'TIME_S': 20.348753690719604, 'TIME_S_1KI': 0.2987323823820721, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1490.6987690734863, 'W': 64.86, 'J_1KI': 21.88438670337047, 'W_1KI': 0.9521852107403438, 'W_D': 30.015249999999995, 'J_D': 689.8503889675139, 'W_D_1KI': 0.4406425708707077, 'J_D_1KI': 0.0064689074808154745} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_040.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_040.json new file mode 100644 index 0000000..db8e166 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_040.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 66611, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 20.194962978363037, "TIME_S_1KI": 0.3031775979697503, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1484.4351484608649, "W": 64.77, "J_1KI": 22.285135314900916, "W_1KI": 0.9723619222050411, "W_D": 29.69625, "J_D": 680.5952953138948, "W_D_1KI": 0.44581600636531504, "J_D_1KI": 0.006692828607366877} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_040.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_040.output new file mode 100644 index 0000000..f6a1565 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_040.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_040.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 
9.105647807962247e-05, "TIME_S": 0.3152611255645752} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.1098, 0.7906, 0.5773, ..., 0.8359, 0.7143, 0.2600]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 0.3152611255645752 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '66611', '-m', 'matrices/as-caida_pruned/as-caida_G_040.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 20.194962978363037} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.4019, 0.6551, 0.2937, ..., 0.2559, 0.0829, 0.6705]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 20.194962978363037 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.4019, 0.6551, 0.2937, ..., 0.2559, 0.0829, 0.6705]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 20.194962978363037 seconds + +[39.09, 38.48, 38.44, 38.37, 44.17, 38.44, 39.34, 38.45, 38.85, 38.69] +[64.77] +22.91856026649475 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 66611, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_040', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89658, 'MATRIX_DENSITY': 9.105647807962247e-05, 'TIME_S': 20.194962978363037, 'TIME_S_1KI': 0.3031775979697503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1484.4351484608649, 'W': 64.77} +[39.09, 38.48, 38.44, 38.37, 44.17, 38.44, 39.34, 38.45, 38.85, 38.69, 39.98, 38.99, 38.5, 38.34, 38.6, 38.35, 38.97, 38.38, 38.74, 38.37] +701.4749999999999 +35.07375 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 66611, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_040', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89658, 'MATRIX_DENSITY': 9.105647807962247e-05, 'TIME_S': 20.194962978363037, 'TIME_S_1KI': 0.3031775979697503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1484.4351484608649, 'W': 64.77, 'J_1KI': 22.285135314900916, 'W_1KI': 0.9723619222050411, 'W_D': 29.69625, 'J_D': 680.5952953138948, 'W_D_1KI': 0.44581600636531504, 'J_D_1KI': 0.006692828607366877} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_045.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_045.json new file mode 100644 index 0000000..4dbbd82 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_045.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 65848, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 20.168872833251953, "TIME_S_1KI": 0.3062943875782401, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1461.650822734833, "W": 64.42, "J_1KI": 22.197345746793115, "W_1KI": 0.9783136921394726, "W_D": 29.341749999999998, "J_D": 665.746554299593, "W_D_1KI": 0.4455981958449763, "J_D_1KI": 0.006767072589068404} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_045.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_045.output new file mode 100644 index 0000000..92cb063 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_045.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_045.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, 
"TIME_S": 0.3189120292663574} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.5569, 0.3292, 0.9791, ..., 0.7347, 0.1403, 0.6402]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 0.3189120292663574 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '65848', '-m', 'matrices/as-caida_pruned/as-caida_G_045.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 20.168872833251953} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.4820, 0.8409, 0.9052, ..., 0.8688, 0.0966, 0.5004]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 20.168872833251953 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.4820, 0.8409, 0.9052, ..., 0.8688, 0.0966, 0.5004]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 20.168872833251953 seconds + +[39.27, 38.4, 38.87, 38.51, 38.89, 38.75, 43.91, 38.5, 38.98, 38.4] +[64.42] +22.6893949508667 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 65848, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_045', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89152, 'MATRIX_DENSITY': 9.054258553341032e-05, 'TIME_S': 20.168872833251953, 'TIME_S_1KI': 0.3062943875782401, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1461.650822734833, 'W': 64.42} +[39.27, 38.4, 38.87, 38.51, 38.89, 38.75, 43.91, 38.5, 38.98, 38.4, 39.17, 38.88, 38.39, 38.55, 38.92, 38.38, 38.66, 38.74, 38.6, 38.43] +701.565 +35.078250000000004 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 65848, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_045', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89152, 'MATRIX_DENSITY': 9.054258553341032e-05, 'TIME_S': 20.168872833251953, 'TIME_S_1KI': 0.3062943875782401, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1461.650822734833, 'W': 64.42, 'J_1KI': 22.197345746793115, 'W_1KI': 0.9783136921394726, 'W_D': 29.341749999999998, 'J_D': 665.746554299593, 'W_D_1KI': 0.4455981958449763, 'J_D_1KI': 0.006767072589068404} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_050.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_050.json new file mode 100644 index 0000000..dc0ac63 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_050.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 67653, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 20.963899612426758, "TIME_S_1KI": 0.3098739096925008, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1521.6521493887903, "W": 64.43, "J_1KI": 22.49201290983091, "W_1KI": 0.9523598362230796, "W_D": 29.412999999999997, "J_D": 694.6508562777042, "W_D_1KI": 0.434762686059746, "J_D_1KI": 0.006426362261241127} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_050.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_050.output new file mode 100644 index 0000000..78a65f2 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_050.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_050.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, 
"TIME_S": 0.3442656993865967} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.6098, 0.6733, 0.1875, ..., 0.0155, 0.1603, 0.0542]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 0.3442656993865967 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '60999', '-m', 'matrices/as-caida_pruned/as-caida_G_050.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 18.93450689315796} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.0392, 0.0975, 0.8186, ..., 0.5653, 0.5730, 0.7356]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 18.93450689315796 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '67653', '-m', 'matrices/as-caida_pruned/as-caida_G_050.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 20.963899612426758} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.1847, 0.1096, 0.9077, ..., 0.4730, 0.5382, 0.5503]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 20.963899612426758 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.1847, 0.1096, 0.9077, ..., 0.4730, 0.5382, 0.5503]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 20.963899612426758 seconds + +[39.77, 38.62, 43.71, 39.32, 38.31, 38.53, 38.39, 38.76, 38.87, 38.22] +[64.43] +23.61713719367981 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 67653, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_050', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 90392, 'MATRIX_DENSITY': 9.180192695100532e-05, 'TIME_S': 20.963899612426758, 'TIME_S_1KI': 0.3098739096925008, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1521.6521493887903, 'W': 64.43} +[39.77, 38.62, 43.71, 39.32, 38.31, 38.53, 38.39, 38.76, 38.87, 38.22, 39.48, 38.43, 38.44, 38.62, 38.55, 38.29, 38.34, 38.26, 38.81, 38.71] +700.3400000000001 +35.01700000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 67653, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_050', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 90392, 'MATRIX_DENSITY': 9.180192695100532e-05, 'TIME_S': 20.963899612426758, 'TIME_S_1KI': 0.3098739096925008, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1521.6521493887903, 'W': 64.43, 'J_1KI': 22.49201290983091, 'W_1KI': 0.9523598362230796, 'W_D': 29.412999999999997, 'J_D': 694.6508562777042, 'W_D_1KI': 0.434762686059746, 'J_D_1KI': 0.006426362261241127} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_055.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_055.json new file mode 100644 index 0000000..1f61b0b --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_055.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 64465, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 20.197006940841675, "TIME_S_1KI": 0.3133018993382715, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1479.964708185196, "W": 64.9, "J_1KI": 22.95764691204834, "W_1KI": 
1.006747847669278, "W_D": 29.771000000000008, "J_D": 678.8910528101923, "W_D_1KI": 0.46181648956798277, "J_D_1KI": 0.007163832925897506} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_055.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_055.output new file mode 100644 index 0000000..40fe55c --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_055.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_055.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 0.32575511932373047} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.8969, 0.2360, 0.8294, ..., 0.5085, 0.7144, 0.2405]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 0.32575511932373047 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '64465', '-m', 'matrices/as-caida_pruned/as-caida_G_055.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 20.197006940841675} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.1713, 0.5108, 0.5338, ..., 0.2665, 0.1185, 0.4866]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 20.197006940841675 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.1713, 0.5108, 0.5338, ..., 0.2665, 0.1185, 0.4866]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 20.197006940841675 seconds + +[39.92, 38.47, 38.51, 38.92, 38.85, 38.9, 38.49, 38.54, 38.51, 38.38] +[64.9] +22.803770542144775 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 64465, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_055', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 91476, 'MATRIX_DENSITY': 9.290283509348351e-05, 'TIME_S': 20.197006940841675, 'TIME_S_1KI': 0.3133018993382715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1479.964708185196, 'W': 64.9} +[39.92, 38.47, 38.51, 38.92, 38.85, 38.9, 38.49, 38.54, 38.51, 38.38, 39.43, 38.43, 38.65, 38.62, 39.01, 38.66, 38.98, 43.91, 39.03, 38.47] +702.5799999999999 +35.129 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 64465, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_055', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 91476, 'MATRIX_DENSITY': 9.290283509348351e-05, 'TIME_S': 20.197006940841675, 'TIME_S_1KI': 0.3133018993382715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1479.964708185196, 'W': 64.9, 'J_1KI': 22.95764691204834, 'W_1KI': 1.006747847669278, 'W_D': 29.771000000000008, 'J_D': 678.8910528101923, 'W_D_1KI': 0.46181648956798277, 'J_D_1KI': 0.007163832925897506} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_060.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_060.json new file mode 100644 index 0000000..e1d32c4 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_060.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 64086, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 20.29684615135193, "TIME_S_1KI": 0.31671263850688025, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1482.1219630122187, "W": 64.65, "J_1KI": 23.127078660116386, "W_1KI": 1.0088006740941862, "W_D": 29.720750000000002, "J_D": 681.3577158885598, "W_D_1KI": 0.4637635364978311, "J_D_1KI": 0.007236581101922902} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_060.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_060.output new file mode 100644 index 0000000..6193f7b --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_060.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_060.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, 
"TIME_S": 0.327683687210083} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.3133, 0.2589, 0.8332, ..., 0.0019, 0.9862, 0.8235]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 0.327683687210083 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '64086', '-m', 'matrices/as-caida_pruned/as-caida_G_060.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 20.29684615135193} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.3861, 0.9218, 0.2349, ..., 0.8411, 0.9508, 0.7323]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 20.29684615135193 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.3861, 0.9218, 0.2349, ..., 0.8411, 0.9508, 0.7323]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 20.29684615135193 seconds + +[39.6, 38.84, 38.59, 38.31, 38.65, 38.36, 38.55, 38.33, 38.89, 38.77] +[64.65] +22.925320386886597 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 64086, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_060', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 94180, 'MATRIX_DENSITY': 9.564901186217454e-05, 'TIME_S': 20.29684615135193, 'TIME_S_1KI': 0.31671263850688025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1482.1219630122187, 'W': 64.65} +[39.6, 38.84, 38.59, 38.31, 38.65, 38.36, 38.55, 38.33, 38.89, 38.77, 39.76, 40.65, 38.48, 38.75, 38.5, 38.85, 38.49, 39.0, 38.86, 38.84] +698.585 +34.92925 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 64086, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_060', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 94180, 'MATRIX_DENSITY': 9.564901186217454e-05, 'TIME_S': 20.29684615135193, 'TIME_S_1KI': 0.31671263850688025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1482.1219630122187, 'W': 64.65, 'J_1KI': 23.127078660116386, 'W_1KI': 1.0088006740941862, 'W_D': 29.720750000000002, 'J_D': 681.3577158885598, 'W_D_1KI': 0.4637635364978311, 'J_D_1KI': 0.007236581101922902} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_065.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_065.json new file mode 100644 index 0000000..479d415 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_065.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 63813, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 20.4384822845459, "TIME_S_1KI": 0.3202871246383323, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1496.5771602988243, "W": 64.95, "J_1KI": 23.452543530296715, "W_1KI": 1.0178176860514316, "W_D": 29.764250000000004, "J_D": 685.8275095215441, "W_D_1KI": 0.46642925422719517, "J_D_1KI": 0.007309313999141165} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_065.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_065.output new file mode 100644 index 0000000..fa2f4c9 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_065.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_065.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 
0.3290855884552002} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.5665, 0.2906, 0.4040, ..., 0.1075, 0.6118, 0.2472]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 0.3290855884552002 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '63813', '-m', 'matrices/as-caida_pruned/as-caida_G_065.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 20.4384822845459} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.8141, 0.7379, 0.0044, ..., 0.1399, 0.1556, 0.7555]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 20.4384822845459 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.8141, 0.7379, 0.0044, ..., 0.1399, 0.1556, 0.7555]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 20.4384822845459 seconds + +[39.89, 38.84, 39.18, 38.42, 38.56, 38.47, 38.59, 38.92, 38.49, 39.56] +[64.95] +23.041988611221313 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 63813, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_065', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 95068, 'MATRIX_DENSITY': 9.655086281283934e-05, 'TIME_S': 20.4384822845459, 'TIME_S_1KI': 0.3202871246383323, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1496.5771602988243, 'W': 64.95} +[39.89, 38.84, 39.18, 38.42, 38.56, 38.47, 38.59, 38.92, 38.49, 39.56, 39.5, 38.49, 38.48, 38.6, 38.45, 39.82, 44.34, 38.41, 38.63, 39.1] +703.715 +35.18575 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 63813, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_065', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 95068, 'MATRIX_DENSITY': 9.655086281283934e-05, 'TIME_S': 20.4384822845459, 'TIME_S_1KI': 0.3202871246383323, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1496.5771602988243, 'W': 64.95, 'J_1KI': 23.452543530296715, 'W_1KI': 1.0178176860514316, 'W_D': 29.764250000000004, 'J_D': 685.8275095215441, 'W_D_1KI': 0.46642925422719517, 'J_D_1KI': 0.007309313999141165} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_070.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_070.json new file mode 100644 index 0000000..36d144b --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_070.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 67374, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 20.209779500961304, "TIME_S_1KI": 0.2999640736925417, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1475.642830133438, "W": 64.45, "J_1KI": 21.902259478930123, "W_1KI": 0.956600469023659, "W_D": 29.329750000000004, "J_D": 671.5319673717023, "W_D_1KI": 0.4353274260100336, "J_D_1KI": 0.006461356398759664} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_070.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_070.output new file mode 100644 index 0000000..4ae6a1c --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_070.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_070.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 
0.3116922378540039} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.8130, 0.1863, 0.1568, ..., 0.9725, 0.1195, 0.3778]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 0.3116922378540039 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '67374', '-m', 'matrices/as-caida_pruned/as-caida_G_070.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 20.209779500961304} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.4291, 0.0523, 0.3196, ..., 0.0396, 0.6455, 0.2082]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 20.209779500961304 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.4291, 0.0523, 0.3196, ..., 0.0396, 0.6455, 0.2082]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 20.209779500961304 seconds + +[39.59, 38.44, 38.59, 38.39, 38.41, 38.79, 40.19, 38.4, 39.21, 38.45] +[64.45] +22.8959321975708 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 67374, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_070', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 78684, 'MATRIX_DENSITY': 7.991130653390679e-05, 'TIME_S': 20.209779500961304, 'TIME_S_1KI': 0.2999640736925417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1475.642830133438, 'W': 64.45} +[39.59, 38.44, 38.59, 38.39, 38.41, 38.79, 40.19, 38.4, 39.21, 38.45, 39.45, 38.39, 38.44, 43.88, 39.33, 38.61, 38.49, 38.34, 38.57, 38.38] +702.405 +35.12025 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 67374, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_070', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 78684, 'MATRIX_DENSITY': 7.991130653390679e-05, 'TIME_S': 20.209779500961304, 'TIME_S_1KI': 0.2999640736925417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1475.642830133438, 'W': 64.45, 'J_1KI': 21.902259478930123, 'W_1KI': 0.956600469023659, 'W_D': 29.329750000000004, 'J_D': 671.5319673717023, 'W_D_1KI': 0.4353274260100336, 'J_D_1KI': 0.006461356398759664} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_075.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_075.json new file mode 100644 index 0000000..1b58570 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_075.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 62635, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 20.409830331802368, "TIME_S_1KI": 0.3258534418743892, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1490.0797438573836, "W": 64.57, "J_1KI": 23.789889739879996, "W_1KI": 1.0308932705356428, "W_D": 29.603999999999992, "J_D": 683.1705240383146, "W_D_1KI": 0.4726430909236049, "J_D_1KI": 0.007545990116126844} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_075.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_075.output new file mode 100644 index 0000000..7842da8 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_075.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_075.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 
0.33527112007141113} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.4278, 0.8607, 0.0770, ..., 0.2395, 0.0430, 0.7313]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 0.33527112007141113 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '62635', '-m', 'matrices/as-caida_pruned/as-caida_G_075.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 20.409830331802368} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.6118, 0.6028, 0.4518, ..., 0.3749, 0.8375, 0.7375]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 20.409830331802368 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.6118, 0.6028, 0.4518, ..., 0.3749, 0.8375, 0.7375]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 20.409830331802368 seconds + +[39.8, 38.33, 38.51, 39.2, 38.87, 38.36, 38.66, 38.42, 38.48, 38.47] +[64.57] +23.076966762542725 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 62635, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_075', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 97492, 'MATRIX_DENSITY': 9.901267216465406e-05, 'TIME_S': 20.409830331802368, 'TIME_S_1KI': 0.3258534418743892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1490.0797438573836, 'W': 64.57} +[39.8, 38.33, 38.51, 39.2, 38.87, 38.36, 38.66, 38.42, 38.48, 38.47, 44.6, 38.84, 39.05, 38.91, 38.82, 38.4, 38.51, 38.45, 38.9, 38.35] +699.32 +34.966 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 62635, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_075', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 97492, 'MATRIX_DENSITY': 9.901267216465406e-05, 'TIME_S': 20.409830331802368, 'TIME_S_1KI': 0.3258534418743892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1490.0797438573836, 'W': 64.57, 'J_1KI': 23.789889739879996, 'W_1KI': 1.0308932705356428, 'W_D': 29.603999999999992, 'J_D': 683.1705240383146, 'W_D_1KI': 0.4726430909236049, 'J_D_1KI': 0.007545990116126844} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_080.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_080.json new file mode 100644 index 0000000..e9061cc --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_080.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 62358, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 20.405128240585327, "TIME_S_1KI": 0.32722550820400476, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1486.526406955719, "W": 64.56, "J_1KI": 23.83858377362518, "W_1KI": 1.035312229385163, "W_D": 29.343750000000007, "J_D": 675.6545733287932, "W_D_1KI": 0.4705691330703359, "J_D_1KI": 0.007546251211878762} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_080.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_080.output new file mode 100644 index 0000000..c7bda36 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_080.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_080.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 
0.3367600440979004} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.7975, 0.7729, 0.5191, ..., 0.7354, 0.7977, 0.0281]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 0.3367600440979004 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '62358', '-m', 'matrices/as-caida_pruned/as-caida_G_080.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 20.405128240585327} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.0871, 0.9165, 0.8965, ..., 0.5242, 0.9568, 0.3458]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 20.405128240585327 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.0871, 0.9165, 0.8965, ..., 0.5242, 0.9568, 0.3458]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 20.405128240585327 seconds + +[39.22, 38.42, 38.53, 38.39, 38.51, 38.41, 39.93, 44.24, 38.83, 38.76] +[64.56] +23.02550196647644 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 62358, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_080', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 98112, 'MATRIX_DENSITY': 9.964234287345156e-05, 'TIME_S': 20.405128240585327, 'TIME_S_1KI': 0.32722550820400476, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1486.526406955719, 'W': 64.56} +[39.22, 38.42, 38.53, 38.39, 38.51, 38.41, 39.93, 44.24, 38.83, 38.76, 39.08, 38.6, 39.66, 38.4, 38.91, 38.65, 38.93, 39.25, 38.9, 38.47] +704.3249999999999 +35.216249999999995 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 62358, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_080', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 98112, 'MATRIX_DENSITY': 9.964234287345156e-05, 'TIME_S': 20.405128240585327, 'TIME_S_1KI': 0.32722550820400476, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1486.526406955719, 'W': 64.56, 'J_1KI': 23.83858377362518, 'W_1KI': 1.035312229385163, 'W_D': 29.343750000000007, 'J_D': 675.6545733287932, 'W_D_1KI': 0.4705691330703359, 'J_D_1KI': 0.007546251211878762} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_085.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_085.json new file mode 100644 index 0000000..6ea4e93 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_085.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 61673, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 20.35384511947632, "TIME_S_1KI": 0.3300284584741511, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1485.5005332756043, "W": 64.68, "J_1KI": 24.086724065240936, "W_1KI": 1.0487571546706014, "W_D": 29.448, "J_D": 676.3299273948669, "W_D_1KI": 0.47748609602257064, "J_D_1KI": 0.007742222626150352} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_085.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_085.output new file mode 100644 index 0000000..c214b55 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_085.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_085.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 
0.0001007127830784073, "TIME_S": 0.34050512313842773} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.9686, 0.0729, 0.3294, ..., 0.4767, 0.1472, 0.5949]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 0.34050512313842773 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '61673', '-m', 'matrices/as-caida_pruned/as-caida_G_085.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 20.35384511947632} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.7301, 0.3905, 0.0759, ..., 0.0439, 0.0248, 0.9324]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 20.35384511947632 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.7301, 0.3905, 0.0759, ..., 0.0439, 0.0248, 0.9324]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 20.35384511947632 seconds + +[39.95, 39.13, 38.92, 38.82, 38.47, 38.42, 38.45, 39.05, 38.44, 38.77] +[64.68] +22.966922283172607 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 61673, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_085', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 99166, 'MATRIX_DENSITY': 0.0001007127830784073, 'TIME_S': 20.35384511947632, 'TIME_S_1KI': 0.3300284584741511, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1485.5005332756043, 'W': 64.68} +[39.95, 39.13, 38.92, 38.82, 38.47, 38.42, 38.45, 39.05, 38.44, 38.77, 39.1, 38.77, 38.92, 40.3, 44.39, 38.42, 38.85, 38.47, 38.49, 38.84] +704.6400000000001 +35.232000000000006 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 61673, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_085', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 99166, 'MATRIX_DENSITY': 0.0001007127830784073, 'TIME_S': 20.35384511947632, 'TIME_S_1KI': 0.3300284584741511, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1485.5005332756043, 'W': 64.68, 'J_1KI': 24.086724065240936, 'W_1KI': 1.0487571546706014, 'W_D': 29.448, 'J_D': 676.3299273948669, 'W_D_1KI': 0.47748609602257064, 'J_D_1KI': 0.007742222626150352} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_090.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_090.json new file mode 100644 index 0000000..f79e2e7 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_090.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 60974, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 20.4024760723114, "TIME_S_1KI": 0.3346094412751566, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1491.4849559259417, "W": 64.54, "J_1KI": 24.4609990475603, "W_1KI": 1.0584839439761211, "W_D": 29.465249999999997, "J_D": 680.9262023178934, "W_D_1KI": 0.48324285761144087, "J_D_1KI": 0.007925392095178943} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_090.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_090.output new file mode 100644 index 0000000..e242492 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_090.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_090.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 
0.00010249820421722343, "TIME_S": 0.3444075584411621} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.7658, 0.2535, 0.2994, ..., 0.5905, 0.1338, 0.2393]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 0.3444075584411621 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '60974', '-m', 'matrices/as-caida_pruned/as-caida_G_090.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 20.4024760723114} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.2816, 0.0863, 0.6090, ..., 0.2133, 0.4263, 0.7091]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 20.4024760723114 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.2816, 0.0863, 0.6090, ..., 0.2133, 0.4263, 0.7091]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 20.4024760723114 seconds + +[39.17, 39.49, 38.5, 38.54, 38.86, 39.31, 39.0, 38.39, 38.55, 38.42] +[64.54] +23.109466314315796 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 60974, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_090', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 100924, 'MATRIX_DENSITY': 0.00010249820421722343, 'TIME_S': 20.4024760723114, 'TIME_S_1KI': 0.3346094412751566, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1491.4849559259417, 'W': 64.54} +[39.17, 39.49, 38.5, 38.54, 38.86, 39.31, 39.0, 38.39, 38.55, 38.42, 40.49, 38.9, 39.25, 39.02, 39.01, 38.49, 38.5, 38.39, 38.42, 43.67] +701.4950000000001 +35.07475000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 60974, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_090', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 100924, 'MATRIX_DENSITY': 0.00010249820421722343, 'TIME_S': 20.4024760723114, 'TIME_S_1KI': 0.3346094412751566, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1491.4849559259417, 'W': 64.54, 'J_1KI': 24.4609990475603, 'W_1KI': 1.0584839439761211, 'W_D': 29.465249999999997, 'J_D': 680.9262023178934, 'W_D_1KI': 0.48324285761144087, 'J_D_1KI': 0.007925392095178943} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_095.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_095.json new file mode 100644 index 0000000..97b995a --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_095.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 60553, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 20.477957248687744, "TIME_S_1KI": 0.33818237327114664, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1487.5360245037077, "W": 64.44, "J_1KI": 24.565851807568702, "W_1KI": 1.064191699833204, "W_D": 29.283749999999998, "J_D": 675.9874776157736, "W_D_1KI": 0.4836052714151239, "J_D_1KI": 0.007986479140837346} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_095.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_095.output new file mode 100644 index 0000000..b0d9b97 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_095.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_095.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 
0.00010388551097241275, "TIME_S": 0.3467981815338135} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.1027, 0.4167, 0.4889, ..., 0.5899, 0.0189, 0.1698]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 0.3467981815338135 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '60553', '-m', 'matrices/as-caida_pruned/as-caida_G_095.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 20.477957248687744} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.5494, 0.3118, 0.2013, ..., 0.3409, 0.4785, 0.2635]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 20.477957248687744 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.5494, 0.3118, 0.2013, ..., 0.3409, 0.4785, 0.2635]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 20.477957248687744 seconds + +[39.21, 38.93, 38.47, 38.38, 39.58, 43.93, 38.88, 38.85, 38.87, 39.11] +[64.44] +23.084047555923462 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 60553, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_095', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102290, 'MATRIX_DENSITY': 0.00010388551097241275, 'TIME_S': 20.477957248687744, 'TIME_S_1KI': 0.33818237327114664, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1487.5360245037077, 'W': 64.44} +[39.21, 38.93, 38.47, 38.38, 39.58, 43.93, 38.88, 38.85, 38.87, 39.11, 40.06, 38.39, 39.14, 38.54, 38.73, 38.36, 38.44, 38.76, 38.52, 38.33] +703.125 +35.15625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 60553, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_095', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102290, 'MATRIX_DENSITY': 0.00010388551097241275, 'TIME_S': 20.477957248687744, 'TIME_S_1KI': 0.33818237327114664, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1487.5360245037077, 'W': 64.44, 'J_1KI': 24.565851807568702, 'W_1KI': 1.064191699833204, 'W_D': 29.283749999999998, 'J_D': 675.9874776157736, 'W_D_1KI': 0.4836052714151239, 'J_D_1KI': 0.007986479140837346} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_100.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_100.json new file mode 100644 index 0000000..0c8c49a --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_100.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 59951, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 20.38682246208191, "TIME_S_1KI": 0.34005808847361857, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1491.7912831115723, "W": 64.64, "J_1KI": 24.883509584687033, "W_1KI": 1.0782138746643093, "W_D": 29.595750000000002, "J_D": 683.0241625487209, "W_D_1KI": 0.49366566028923625, "J_D_1KI": 0.008234485834919121} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_100.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_100.output new file mode 100644 index 0000000..f66275b --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_100.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_100.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 
0.00010449283852702711, "TIME_S": 0.3502840995788574} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.3199, 0.4533, 0.2734, ..., 0.8158, 0.3895, 0.8815]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 0.3502840995788574 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '59951', '-m', 'matrices/as-caida_pruned/as-caida_G_100.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 20.38682246208191} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.0855, 0.5449, 0.1065, ..., 0.3145, 0.3819, 0.8955]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 20.38682246208191 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.0855, 0.5449, 0.1065, ..., 0.3145, 0.3819, 0.8955]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 20.38682246208191 seconds + +[39.39, 38.45, 38.89, 38.44, 38.45, 38.77, 38.51, 38.61, 38.55, 38.84] +[64.64] +23.07845425605774 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 59951, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_100', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102888, 'MATRIX_DENSITY': 0.00010449283852702711, 'TIME_S': 20.38682246208191, 'TIME_S_1KI': 0.34005808847361857, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1491.7912831115723, 'W': 64.64} +[39.39, 38.45, 38.89, 38.44, 38.45, 38.77, 38.51, 38.61, 38.55, 38.84, 39.13, 43.45, 39.47, 38.38, 38.75, 38.77, 38.42, 38.54, 38.56, 38.39] +700.885 +35.04425 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 59951, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_100', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102888, 'MATRIX_DENSITY': 0.00010449283852702711, 'TIME_S': 20.38682246208191, 'TIME_S_1KI': 0.34005808847361857, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1491.7912831115723, 'W': 64.64, 'J_1KI': 24.883509584687033, 'W_1KI': 1.0782138746643093, 'W_D': 29.595750000000002, 'J_D': 683.0241625487209, 'W_D_1KI': 0.49366566028923625, 'J_D_1KI': 0.008234485834919121} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_105.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_105.json new file mode 100644 index 0000000..c409ffe --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_105.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 58820, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 20.285549879074097, "TIME_S_1KI": 0.34487504044668643, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1481.8248089790343, "W": 64.72, "J_1KI": 25.19253330464186, "W_1KI": 1.1003060183611015, "W_D": 29.537750000000003, "J_D": 676.2943564805389, "W_D_1KI": 0.5021718803128188, "J_D_1KI": 0.008537434211370602} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_105.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_105.output new file mode 100644 index 0000000..60ee712 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_105.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_105.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 
0.00010635950749923647, "TIME_S": 0.35701918601989746} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.4581, 0.3425, 0.8722, ..., 0.6052, 0.7423, 0.6287]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 0.35701918601989746 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '58820', '-m', 'matrices/as-caida_pruned/as-caida_G_105.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 20.285549879074097} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.3229, 0.3384, 0.2910, ..., 0.5902, 0.9405, 0.5528]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 20.285549879074097 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.3229, 0.3384, 0.2910, ..., 0.5902, 0.9405, 0.5528]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 20.285549879074097 seconds + +[39.9, 38.52, 38.36, 38.62, 40.28, 38.52, 39.06, 38.38, 39.08, 38.88] +[64.72] +22.895933389663696 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 58820, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_105', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104726, 'MATRIX_DENSITY': 0.00010635950749923647, 'TIME_S': 20.285549879074097, 'TIME_S_1KI': 0.34487504044668643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1481.8248089790343, 'W': 64.72} +[39.9, 38.52, 38.36, 38.62, 40.28, 38.52, 39.06, 38.38, 39.08, 38.88, 38.99, 43.82, 38.93, 38.36, 38.51, 38.53, 38.92, 38.74, 38.73, 38.8] +703.645 +35.182249999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 58820, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_105', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104726, 'MATRIX_DENSITY': 0.00010635950749923647, 'TIME_S': 20.285549879074097, 'TIME_S_1KI': 0.34487504044668643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1481.8248089790343, 'W': 64.72, 'J_1KI': 25.19253330464186, 'W_1KI': 1.1003060183611015, 'W_D': 29.537750000000003, 'J_D': 676.2943564805389, 'W_D_1KI': 0.5021718803128188, 'J_D_1KI': 0.008537434211370602} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_110.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_110.json new file mode 100644 index 0000000..1fb0ac1 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_110.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 59032, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 20.424701929092407, "TIME_S_1KI": 0.34599373101186487, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1486.627005004883, "W": 64.48, "J_1KI": 25.18340908329182, "W_1KI": 1.0922889280390298, "W_D": 29.6115, "J_D": 682.711779756546, "W_D_1KI": 0.5016177666350453, "J_D_1KI": 0.0084973872922321} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_110.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_110.output new file mode 100644 index 0000000..911963a --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_110.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_110.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 
0.0001064813792493263, "TIME_S": 0.35573506355285645} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.9802, 0.1691, 0.5409, ..., 0.3456, 0.2777, 0.1678]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 0.35573506355285645 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '59032', '-m', 'matrices/as-caida_pruned/as-caida_G_110.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 20.424701929092407} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.1586, 0.3589, 0.7410, ..., 0.9827, 0.5721, 0.3518]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 20.424701929092407 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.1586, 0.3589, 0.7410, ..., 0.9827, 0.5721, 0.3518]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 20.424701929092407 seconds + +[39.16, 38.42, 39.3, 38.89, 38.99, 38.9, 38.48, 38.84, 38.42, 38.45] +[64.48] +23.05562973022461 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 59032, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_110', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104846, 'MATRIX_DENSITY': 0.0001064813792493263, 'TIME_S': 20.424701929092407, 'TIME_S_1KI': 0.34599373101186487, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1486.627005004883, 'W': 64.48} +[39.16, 38.42, 39.3, 38.89, 38.99, 38.9, 38.48, 38.84, 38.42, 38.45, 39.84, 38.98, 39.13, 38.56, 38.79, 38.44, 38.45, 38.4, 38.47, 38.37] +697.3700000000001 +34.868500000000004 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 59032, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_110', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104846, 'MATRIX_DENSITY': 0.0001064813792493263, 'TIME_S': 20.424701929092407, 'TIME_S_1KI': 0.34599373101186487, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1486.627005004883, 'W': 64.48, 'J_1KI': 25.18340908329182, 'W_1KI': 1.0922889280390298, 'W_D': 29.6115, 'J_D': 682.711779756546, 'W_D_1KI': 0.5016177666350453, 'J_D_1KI': 0.0084973872922321} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_115.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_115.json new file mode 100644 index 0000000..5c4ff6b --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_115.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 58521, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 20.290428400039673, "TIME_S_1KI": 0.3467204661581257, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1485.235294933319, "W": 64.77, "J_1KI": 25.3795269208202, "W_1KI": 1.1067821807556262, "W_D": 29.889499999999998, "J_D": 685.3935517663955, "W_D_1KI": 0.5107482783957895, "J_D_1KI": 0.008727606814575786} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_115.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_115.output new file mode 100644 index 0000000..e4deb18 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_115.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_115.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 
0.00010797024579625715, "TIME_S": 0.358839750289917} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.5030, 0.3160, 0.3950, ..., 0.4857, 0.4892, 0.0385]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 0.358839750289917 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '58521', '-m', 'matrices/as-caida_pruned/as-caida_G_115.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 20.290428400039673} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.0083, 0.2638, 0.6490, ..., 0.8518, 0.9646, 0.4918]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 20.290428400039673 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.0083, 0.2638, 0.6490, ..., 0.8518, 0.9646, 0.4918]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 20.290428400039673 seconds + +[39.14, 39.28, 38.57, 39.17, 38.64, 38.57, 39.01, 38.47, 38.51, 38.4] +[64.77] +22.9309139251709 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 58521, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_115', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106312, 'MATRIX_DENSITY': 0.00010797024579625715, 'TIME_S': 20.290428400039673, 'TIME_S_1KI': 0.3467204661581257, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1485.235294933319, 'W': 64.77} +[39.14, 39.28, 38.57, 39.17, 38.64, 38.57, 39.01, 38.47, 38.51, 38.4, 39.4, 38.93, 38.88, 38.54, 38.88, 38.62, 38.85, 38.35, 38.64, 38.46] +697.61 +34.8805 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 58521, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_115', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106312, 'MATRIX_DENSITY': 0.00010797024579625715, 'TIME_S': 20.290428400039673, 'TIME_S_1KI': 0.3467204661581257, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1485.235294933319, 'W': 64.77, 'J_1KI': 25.3795269208202, 'W_1KI': 1.1067821807556262, 'W_D': 29.889499999999998, 'J_D': 685.3935517663955, 'W_D_1KI': 0.5107482783957895, 'J_D_1KI': 0.008727606814575786} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_120.json b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_120.json new file mode 100644 index 0000000..2a477ec --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_120.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 58431, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, "TIME_S": 21.711358785629272, "TIME_S_1KI": 0.37157260333777054, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1488.4902221775055, "W": 64.67, "J_1KI": 25.47432394067371, "W_1KI": 1.1067755129982373, "W_D": 29.694500000000005, "J_D": 683.4695052180291, "W_D_1KI": 0.5081977032739472, "J_D_1KI": 0.008697398697163274} diff --git a/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_120.output b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_120.output new file mode 100644 index 0000000..e12ab00 --- /dev/null +++ b/pytorch/output_as-caida/epyc_7313p_1_csr_20_10_10_as-caida_G_120.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_120.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, 
"TIME_S": 0.35939526557922363} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.8836, 0.4525, 0.2702, ..., 0.9547, 0.0131, 0.6823]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 0.35939526557922363 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '58431', '-m', 'matrices/as-caida_pruned/as-caida_G_120.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, "TIME_S": 21.711358785629272} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.6034, 0.4908, 0.7399, ..., 0.6895, 0.1437, 0.2760]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 21.711358785629272 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.6034, 0.4908, 0.7399, ..., 0.6895, 0.1437, 0.2760]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 21.711358785629272 seconds + +[39.83, 38.72, 39.06, 38.39, 39.08, 38.38, 38.92, 38.49, 39.72, 38.89] +[64.67] +23.016703605651855 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 58431, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_120', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106510, 'MATRIX_DENSITY': 0.0001081713341839054, 'TIME_S': 21.711358785629272, 'TIME_S_1KI': 0.37157260333777054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1488.4902221775055, 'W': 64.67} +[39.83, 38.72, 39.06, 38.39, 39.08, 38.38, 38.92, 38.49, 39.72, 38.89, 39.08, 40.07, 38.45, 39.0, 39.02, 38.81, 38.51, 38.34, 38.4, 38.5] +699.51 +34.9755 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 58431, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_120', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106510, 'MATRIX_DENSITY': 0.0001081713341839054, 'TIME_S': 21.711358785629272, 'TIME_S_1KI': 0.37157260333777054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1488.4902221775055, 'W': 64.67, 'J_1KI': 25.47432394067371, 'W_1KI': 1.1067755129982373, 'W_D': 29.694500000000005, 'J_D': 683.4695052180291, 'W_D_1KI': 0.5081977032739472, 'J_D_1KI': 0.008697398697163274} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_005.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_005.json new file mode 100644 index 0000000..6fbc6e4 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_005.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 48840, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 21.028095722198486, "TIME_S_1KI": 0.43055069046270444, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1314.1331713294983, "W": 53.14, "J_1KI": 26.906903589875068, "W_1KI": 1.0880425880425881, "W_D": 36.10325, "J_D": 892.8204444448949, "W_D_1KI": 0.739214782964783, "J_D_1KI": 0.015135437816641749} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_005.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_005.output new file mode 100644 index 0000000..37db2cb --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_005.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_005.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, 
"MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 0.4881734848022461} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.6790, 0.5334, 0.3432, ..., 0.5183, 0.8292, 0.3502]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 0.4881734848022461 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '43017', '-m', 'matrices/as-caida_pruned/as-caida_G_005.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 18.49601435661316} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.8994, 0.2991, 0.8467, ..., 0.9502, 0.4426, 0.6662]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 18.49601435661316 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '48840', '-m', 'matrices/as-caida_pruned/as-caida_G_005.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_005", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 70026, "MATRIX_DENSITY": 7.111825976492498e-05, "TIME_S": 21.028095722198486} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.1044, 0.7711, 0.0895, ..., 0.1715, 0.6071, 0.7494]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 21.028095722198486 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 63, 63, ..., 70025, 70025, 70026]), + col_indices=tensor([ 111, 761, 822, ..., 978, 978, 12170]), + values=tensor([4., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=70026, layout=torch.sparse_csr) +tensor([0.1044, 0.7711, 0.0895, ..., 0.1715, 0.6071, 0.7494]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_005 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 70026 +Density: 7.111825976492498e-05 +Time: 21.028095722198486 seconds + +[18.94, 18.74, 22.53, 18.7, 18.61, 19.0, 18.58, 18.37, 18.93, 18.8] +[53.14] +24.729641914367676 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 48840, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_005', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 70026, 'MATRIX_DENSITY': 7.111825976492498e-05, 'TIME_S': 21.028095722198486, 'TIME_S_1KI': 0.43055069046270444, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1314.1331713294983, 'W': 53.14} +[18.94, 18.74, 22.53, 18.7, 18.61, 19.0, 18.58, 18.37, 18.93, 18.8, 19.35, 18.44, 18.5, 18.59, 18.77, 18.54, 18.6, 18.49, 18.84, 19.92] +340.735 +17.03675 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 48840, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_005', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 70026, 'MATRIX_DENSITY': 7.111825976492498e-05, 'TIME_S': 21.028095722198486, 'TIME_S_1KI': 0.43055069046270444, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1314.1331713294983, 'W': 53.14, 'J_1KI': 26.906903589875068, 'W_1KI': 1.0880425880425881, 'W_D': 36.10325, 'J_D': 892.8204444448949, 'W_D_1KI': 0.739214782964783, 'J_D_1KI': 0.015135437816641749} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_010.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_010.json new file mode 100644 index 0000000..50be046 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_010.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 45184, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 22.546876192092896, "TIME_S_1KI": 0.49900133215503045, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1389.5694504547118, "W": 52.959999999999994, "J_1KI": 30.753573177556476, "W_1KI": 1.172096317280453, "W_D": 
35.975249999999996, "J_D": 943.9219858850239, "W_D_1KI": 0.7961944493626061, "J_D_1KI": 0.017621159024491104} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_010.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_010.output new file mode 100644 index 0000000..3e9321c --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_010.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 0.4647641181945801} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.3729, 0.3894, 0.6658, ..., 0.4392, 0.4413, 0.1362]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 0.4647641181945801 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '45184', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 22.546876192092896} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.8378, 0.6978, 0.5812, ..., 0.6109, 0.6140, 0.7607]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 22.546876192092896 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 28, 28, ..., 74993, 74993, 74994]), + col_indices=tensor([ 1040, 2020, 2054, ..., 160, 160, 12170]), + values=tensor([1., 3., 3., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=74994, layout=torch.sparse_csr) +tensor([0.8378, 0.6978, 0.5812, ..., 0.6109, 0.6140, 0.7607]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 22.546876192092896 seconds + +[19.15, 18.42, 18.86, 18.95, 18.56, 18.71, 18.71, 18.96, 18.55, 18.41] +[52.96] +26.238093852996826 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 45184, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 22.546876192092896, 'TIME_S_1KI': 0.49900133215503045, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1389.5694504547118, 'W': 52.959999999999994} +[19.15, 18.42, 18.86, 18.95, 18.56, 18.71, 18.71, 18.96, 18.55, 18.41, 19.32, 18.59, 18.79, 18.38, 18.53, 18.47, 18.48, 21.68, 19.33, 18.57] +339.695 +16.98475 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 45184, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 22.546876192092896, 'TIME_S_1KI': 0.49900133215503045, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1389.5694504547118, 'W': 52.959999999999994, 'J_1KI': 30.753573177556476, 'W_1KI': 1.172096317280453, 'W_D': 35.975249999999996, 'J_D': 943.9219858850239, 'W_D_1KI': 0.7961944493626061, 'J_D_1KI': 0.017621159024491104} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_015.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_015.json new file mode 100644 index 0000000..04f069f --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_015.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 43567, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 20.371621131896973, "TIME_S_1KI": 0.4675929288658152, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1277.1053492355347, "W": 52.96, "J_1KI": 29.31359398708965, "W_1KI": 1.2155989625175017, "W_D": 35.949, "J_D": 866.8931306583881, "W_D_1KI": 0.8251428833750315, "J_D_1KI": 0.0189396305317105} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_015.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_015.output new file mode 100644 index 0000000..dd755e8 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_015.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_015.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 
31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 0.48201489448547363} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.2879, 0.2223, 0.5198, ..., 0.0988, 0.4445, 0.6931]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 0.48201489448547363 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '43567', '-m', 'matrices/as-caida_pruned/as-caida_G_015.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_015", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 77124, "MATRIX_DENSITY": 7.832697378273889e-05, "TIME_S": 20.371621131896973} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.9891, 0.6030, 0.9392, ..., 0.5375, 0.7186, 0.5697]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 20.371621131896973 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 77124, 77124, 77124]), + col_indices=tensor([1040, 2054, 4842, ..., 160, 160, 8230]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=77124, layout=torch.sparse_csr) +tensor([0.9891, 0.6030, 0.9392, ..., 0.5375, 0.7186, 0.5697]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_015 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 77124 +Density: 7.832697378273889e-05 +Time: 20.371621131896973 seconds + +[18.77, 18.62, 19.2, 18.62, 18.59, 18.66, 18.68, 18.28, 18.49, 18.53] +[52.96] +24.114526987075806 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 43567, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_015', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 77124, 'MATRIX_DENSITY': 7.832697378273889e-05, 'TIME_S': 20.371621131896973, 'TIME_S_1KI': 0.4675929288658152, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.1053492355347, 'W': 52.96} +[18.77, 18.62, 19.2, 18.62, 18.59, 18.66, 18.68, 18.28, 18.49, 18.53, 19.13, 18.74, 18.62, 18.48, 18.58, 18.92, 18.75, 22.79, 18.69, 18.59] +340.22 +17.011000000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 43567, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_015', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 77124, 'MATRIX_DENSITY': 7.832697378273889e-05, 'TIME_S': 20.371621131896973, 'TIME_S_1KI': 0.4675929288658152, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.1053492355347, 'W': 52.96, 'J_1KI': 29.31359398708965, 'W_1KI': 1.2155989625175017, 'W_D': 35.949, 'J_D': 866.8931306583881, 'W_D_1KI': 0.8251428833750315, 'J_D_1KI': 0.0189396305317105} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_020.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_020.json new file mode 100644 index 0000000..2e5bbfb --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_020.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 41950, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 22.586446285247803, "TIME_S_1KI": 0.5384134990523911, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1304.1438443374634, "W": 54.02, "J_1KI": 31.0880535002971, "W_1KI": 1.2877234803337307, "W_D": 24.107250000000008, "J_D": 581.9941075787547, "W_D_1KI": 0.5746662693682957, "J_D_1KI": 0.013698838363964141} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_020.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_020.output new file mode 100644 index 0000000..c85fca1 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_020.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_020.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 
8.221062021893506e-05, "TIME_S": 0.5005884170532227} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.5955, 0.6338, 0.5946, ..., 0.8819, 0.0819, 0.3468]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 0.5005884170532227 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '41950', '-m', 'matrices/as-caida_pruned/as-caida_G_020.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_020", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 80948, "MATRIX_DENSITY": 8.221062021893506e-05, "TIME_S": 22.586446285247803} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.3482, 0.8694, 0.9047, ..., 0.5833, 0.3743, 0.0463]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 22.586446285247803 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 80944, 80946, 80948]), + col_indices=tensor([ 1040, 5699, 106, ..., 31378, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=80948, layout=torch.sparse_csr) +tensor([0.3482, 0.8694, 0.9047, ..., 0.5833, 0.3743, 0.0463]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_020 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 80948 +Density: 8.221062021893506e-05 +Time: 22.586446285247803 seconds + +[19.02, 18.63, 19.06, 20.29, 18.99, 18.47, 19.08, 18.44, 20.58, 53.54] +[54.02] +24.141870498657227 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 41950, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_020', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 80948, 'MATRIX_DENSITY': 8.221062021893506e-05, 'TIME_S': 22.586446285247803, 'TIME_S_1KI': 0.5384134990523911, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1304.1438443374634, 'W': 54.02} +[19.02, 18.63, 19.06, 20.29, 18.99, 18.47, 19.08, 18.44, 20.58, 53.54, 44.68, 46.44, 46.33, 44.48, 44.67, 44.72, 44.86, 44.83, 46.48, 46.57] +598.2549999999999 +29.912749999999996 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 41950, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_020', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 80948, 'MATRIX_DENSITY': 8.221062021893506e-05, 'TIME_S': 22.586446285247803, 'TIME_S_1KI': 0.5384134990523911, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1304.1438443374634, 'W': 54.02, 'J_1KI': 31.0880535002971, 'W_1KI': 1.2877234803337307, 'W_D': 24.107250000000008, 'J_D': 581.9941075787547, 'W_D_1KI': 0.5746662693682957, 'J_D_1KI': 0.013698838363964141} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_025.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_025.json new file mode 100644 index 0000000..fa98518 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_025.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 40552, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 20.386262893676758, "TIME_S_1KI": 0.5027190494593795, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1398.9001111173632, "W": 53.02000000000001, "J_1KI": 34.49645174386869, "W_1KI": 1.3074570921286253, "W_D": 36.02675000000001, "J_D": 950.5436548132302, "W_D_1KI": 0.8884087098046954, "J_D_1KI": 0.021907888878592807} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_025.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_025.output new file mode 100644 index 0000000..954532b --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_025.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_025.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 
984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 0.5178430080413818} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.6292, 0.9483, 0.7293, ..., 0.8239, 0.9503, 0.9810]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 0.5178430080413818 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '40552', '-m', 'matrices/as-caida_pruned/as-caida_G_025.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_025", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 85850, "MATRIX_DENSITY": 8.718908121010495e-05, "TIME_S": 20.386262893676758} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.2194, 0.8210, 0.7379, ..., 0.2356, 0.3239, 0.7496]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 20.386262893676758 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 85845, 85847, 85850]), + col_indices=tensor([ 346, 13811, 21783, ..., 15310, 17998, 31377]), + values=tensor([1., 1., 1., ..., 1., 1., 3.]), size=(31379, 31379), + nnz=85850, layout=torch.sparse_csr) +tensor([0.2194, 0.8210, 0.7379, ..., 0.2356, 0.3239, 0.7496]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_025 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 85850 +Density: 8.718908121010495e-05 +Time: 20.386262893676758 seconds + +[19.59, 18.47, 18.65, 18.69, 18.61, 18.54, 18.67, 18.95, 20.94, 18.9] +[53.02] +26.384385347366333 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 40552, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_025', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 85850, 'MATRIX_DENSITY': 8.718908121010495e-05, 'TIME_S': 20.386262893676758, 'TIME_S_1KI': 0.5027190494593795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.9001111173632, 'W': 53.02000000000001} +[19.59, 18.47, 18.65, 18.69, 18.61, 18.54, 18.67, 18.95, 20.94, 18.9, 20.02, 18.72, 19.01, 18.6, 18.79, 18.54, 18.9, 18.59, 18.71, 18.46] +339.865 +16.99325 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 40552, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_025', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 85850, 'MATRIX_DENSITY': 8.718908121010495e-05, 'TIME_S': 20.386262893676758, 'TIME_S_1KI': 0.5027190494593795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.9001111173632, 'W': 53.02000000000001, 'J_1KI': 34.49645174386869, 'W_1KI': 1.3074570921286253, 'W_D': 36.02675000000001, 'J_D': 950.5436548132302, 'W_D_1KI': 0.8884087098046954, 'J_D_1KI': 0.021907888878592807} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_030.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_030.json new file mode 100644 index 0000000..394dfea --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_030.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 41155, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 20.94717025756836, "TIME_S_1KI": 0.5089823899299808, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1315.9388547515869, "W": 52.91, "J_1KI": 31.975187820473497, "W_1KI": 1.2856275057708662, "W_D": 35.942499999999995, "J_D": 893.9355941581725, "W_D_1KI": 0.8733446725792734, "J_D_1KI": 0.021220864356196658} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_030.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_030.output new file mode 100644 index 0000000..bd89a5e --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_030.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_030.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, 
"MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 0.5767486095428467} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.7290, 0.4625, 0.9889, ..., 0.3533, 0.7320, 0.9038]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 0.5767486095428467 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '36411', '-m', 'matrices/as-caida_pruned/as-caida_G_030.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 18.579289436340332} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.1816, 0.6354, 0.9389, ..., 0.0230, 0.8001, 0.9688]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 18.579289436340332 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '41155', '-m', 'matrices/as-caida_pruned/as-caida_G_030.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_030", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 86850, "MATRIX_DENSITY": 8.820467912752026e-05, "TIME_S": 20.94717025756836} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.9233, 0.1936, 0.2333, ..., 0.9124, 0.0317, 0.1825]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 20.94717025756836 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 86850, 86850, 86850]), + col_indices=tensor([ 1809, 21783, 106, ..., 7018, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=86850, layout=torch.sparse_csr) +tensor([0.9233, 0.1936, 0.2333, ..., 0.9124, 0.0317, 0.1825]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_030 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 86850 +Density: 8.820467912752026e-05 +Time: 20.94717025756836 seconds + +[19.12, 18.56, 18.67, 18.3, 18.71, 18.42, 22.24, 19.12, 18.54, 18.85] +[52.91] +24.87126922607422 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 41155, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_030', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 86850, 'MATRIX_DENSITY': 8.820467912752026e-05, 'TIME_S': 20.94717025756836, 'TIME_S_1KI': 0.5089823899299808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1315.9388547515869, 'W': 52.91} +[19.12, 18.56, 18.67, 18.3, 18.71, 18.42, 22.24, 19.12, 18.54, 18.85, 19.15, 18.49, 18.7, 18.54, 18.75, 18.67, 18.67, 18.45, 18.7, 18.52] +339.34999999999997 +16.967499999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 41155, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_030', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 86850, 'MATRIX_DENSITY': 8.820467912752026e-05, 'TIME_S': 20.94717025756836, 'TIME_S_1KI': 0.5089823899299808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1315.9388547515869, 'W': 52.91, 'J_1KI': 31.975187820473497, 'W_1KI': 1.2856275057708662, 'W_D': 35.942499999999995, 'J_D': 893.9355941581725, 'W_D_1KI': 0.8733446725792734, 'J_D_1KI': 0.021220864356196658} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_035.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_035.json new file mode 100644 index 0000000..d258b03 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_035.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 36073, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 20.54652428627014, "TIME_S_1KI": 0.5695818004122236, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1182.299026594162, "W": 52.98, "J_1KI": 32.77517884828436, "W_1KI": 1.4686884927785322, 
"W_D": 35.8475, "J_D": 799.9710146439074, "W_D_1KI": 0.9937487871815485, "J_D_1KI": 0.02754827120509934} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_035.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_035.output new file mode 100644 index 0000000..2ec82a8 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_035.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_035.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 0.5821480751037598} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.2838, 0.6443, 0.1231, ..., 0.1471, 0.9948, 0.6829]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 0.5821480751037598 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '36073', '-m', 'matrices/as-caida_pruned/as-caida_G_035.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_035", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 87560, "MATRIX_DENSITY": 8.892575364888514e-05, "TIME_S": 20.54652428627014} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.2494, 0.1317, 0.5446, ..., 0.2543, 0.9878, 0.5674]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 20.54652428627014 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 87559, 87559, 87560]), + col_indices=tensor([ 1809, 21783, 106, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=87560, layout=torch.sparse_csr) +tensor([0.2494, 0.1317, 0.5446, ..., 0.2543, 0.9878, 0.5674]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_035 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 87560 +Density: 8.892575364888514e-05 +Time: 20.54652428627014 seconds + +[19.22, 18.47, 18.52, 18.74, 19.03, 18.64, 18.54, 18.58, 18.88, 18.67] +[52.98] +22.3159499168396 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36073, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_035', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 87560, 'MATRIX_DENSITY': 8.892575364888514e-05, 'TIME_S': 20.54652428627014, 'TIME_S_1KI': 0.5695818004122236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1182.299026594162, 'W': 52.98} +[19.22, 18.47, 18.52, 18.74, 19.03, 18.64, 18.54, 18.58, 18.88, 18.67, 19.05, 18.91, 18.57, 22.93, 20.37, 18.76, 18.65, 18.47, 18.72, 18.8] +342.65 +17.1325 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36073, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_035', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 87560, 'MATRIX_DENSITY': 8.892575364888514e-05, 'TIME_S': 20.54652428627014, 'TIME_S_1KI': 0.5695818004122236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1182.299026594162, 'W': 52.98, 'J_1KI': 32.77517884828436, 'W_1KI': 1.4686884927785322, 'W_D': 35.8475, 'J_D': 799.9710146439074, 'W_D_1KI': 0.9937487871815485, 'J_D_1KI': 0.02754827120509934} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_040.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_040.json new file mode 100644 index 0000000..343aa06 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_040.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 39974, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 23.07060408592224, "TIME_S_1KI": 0.5771402433062051, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1421.6360660672187, "W": 52.97, "J_1KI": 35.56401826355178, "W_1KI": 1.3251113223595337, "W_D": 36.0155, "J_D": 966.6024870198966, "W_D_1KI": 0.9009731325361486, "J_D_1KI": 0.022538978649525906} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_040.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_040.output new file mode 100644 index 0000000..8096441 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_040.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_040.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 
984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 0.6011238098144531} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.6392, 0.9406, 0.8948, ..., 0.4194, 0.9795, 0.6508]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 0.6011238098144531 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '34934', '-m', 'matrices/as-caida_pruned/as-caida_G_040.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 18.351969480514526} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.9410, 0.3295, 0.6759, ..., 0.1310, 0.7943, 0.5178]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 18.351969480514526 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '39974', '-m', 'matrices/as-caida_pruned/as-caida_G_040.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_040", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89658, "MATRIX_DENSITY": 9.105647807962247e-05, "TIME_S": 23.07060408592224} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.4614, 0.2038, 0.4498, ..., 0.6505, 0.4413, 0.5966]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 23.07060408592224 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89657, 89657, 89658]), + col_indices=tensor([ 106, 329, 1040, ..., 10144, 882, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89658, layout=torch.sparse_csr) +tensor([0.4614, 0.2038, 0.4498, ..., 0.6505, 0.4413, 0.5966]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_040 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89658 +Density: 9.105647807962247e-05 +Time: 23.07060408592224 seconds + +[18.92, 18.56, 18.54, 18.35, 18.83, 19.88, 19.86, 18.53, 18.79, 18.6] +[52.97] +26.838513612747192 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 39974, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_040', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89658, 'MATRIX_DENSITY': 9.105647807962247e-05, 'TIME_S': 23.07060408592224, 'TIME_S_1KI': 0.5771402433062051, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.6360660672187, 'W': 52.97} +[18.92, 18.56, 18.54, 18.35, 18.83, 19.88, 19.86, 18.53, 18.79, 18.6, 18.88, 18.72, 18.87, 18.67, 18.57, 18.78, 18.63, 18.98, 18.84, 18.98] +339.09 +16.9545 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 39974, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_040', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89658, 'MATRIX_DENSITY': 9.105647807962247e-05, 'TIME_S': 23.07060408592224, 'TIME_S_1KI': 0.5771402433062051, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.6360660672187, 'W': 52.97, 'J_1KI': 35.56401826355178, 'W_1KI': 1.3251113223595337, 'W_D': 36.0155, 'J_D': 966.6024870198966, 'W_D_1KI': 0.9009731325361486, 'J_D_1KI': 0.022538978649525906} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_045.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_045.json new file mode 100644 index 0000000..458d44e --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_045.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 35445, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 20.511640071868896, "TIME_S_1KI": 0.5786892388734348, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1288.727378101349, "W": 52.94, "J_1KI": 36.358509750355445, "W_1KI": 1.4935816053039919, "W_D": 35.8035, "J_D": 
871.570658894062, "W_D_1KI": 1.010114261531951, "J_D_1KI": 0.028498074806938948} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_045.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_045.output new file mode 100644 index 0000000..4908433 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_045.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_045.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 0.5924630165100098} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.7715, 0.8313, 0.4670, ..., 0.4007, 0.8679, 0.4137]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 0.5924630165100098 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '35445', '-m', 'matrices/as-caida_pruned/as-caida_G_045.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_045", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 89152, "MATRIX_DENSITY": 9.054258553341032e-05, "TIME_S": 20.511640071868896} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.9006, 0.2634, 0.6832, ..., 0.5902, 0.3462, 0.9426]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 20.511640071868896 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 89150, 89150, 89152]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=89152, layout=torch.sparse_csr) +tensor([0.9006, 0.2634, 0.6832, ..., 0.5902, 0.3462, 0.9426]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_045 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 89152 +Density: 9.054258553341032e-05 +Time: 20.511640071868896 seconds + +[19.12, 18.42, 18.61, 18.53, 18.74, 22.86, 18.74, 18.8, 19.35, 18.72] +[52.94] +24.34316921234131 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35445, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_045', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89152, 'MATRIX_DENSITY': 9.054258553341032e-05, 'TIME_S': 20.511640071868896, 'TIME_S_1KI': 0.5786892388734348, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1288.727378101349, 'W': 52.94} +[19.12, 18.42, 18.61, 18.53, 18.74, 22.86, 18.74, 18.8, 19.35, 18.72, 19.51, 18.77, 18.69, 18.91, 19.45, 18.77, 18.69, 18.76, 18.61, 18.71] +342.73 +17.1365 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35445, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_045', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 89152, 'MATRIX_DENSITY': 9.054258553341032e-05, 'TIME_S': 20.511640071868896, 'TIME_S_1KI': 0.5786892388734348, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1288.727378101349, 'W': 52.94, 'J_1KI': 36.358509750355445, 'W_1KI': 1.4935816053039919, 'W_D': 35.8035, 'J_D': 871.570658894062, 'W_D_1KI': 1.010114261531951, 'J_D_1KI': 0.028498074806938948} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_050.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_050.json new file mode 100644 index 0000000..e0b4cf7 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_050.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 39539, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 23.2499098777771, "TIME_S_1KI": 0.5880247319805029, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1432.2488470888138, "W": 52.93, "J_1KI": 36.223699311788714, "W_1KI": 1.3386782670274917, "W_D": 35.8515, "J_D": 970.1165603892804, "W_D_1KI": 0.9067376514327626, "J_D_1KI": 0.022932741127311328} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_050.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_050.output new file mode 100644 index 0000000..0aa636a --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_050.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_050.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, 
"TIME_S": 0.5838258266448975} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.4493, 0.8902, 0.2283, ..., 0.0336, 0.7540, 0.4988]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 0.5838258266448975 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '35969', '-m', 'matrices/as-caida_pruned/as-caida_G_050.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 19.103416681289673} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.4134, 0.4401, 0.6258, ..., 0.9818, 0.6314, 0.7941]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 19.103416681289673 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '39539', '-m', 'matrices/as-caida_pruned/as-caida_G_050.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_050", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 90392, "MATRIX_DENSITY": 9.180192695100532e-05, "TIME_S": 23.2499098777771} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.8729, 0.8085, 0.9821, ..., 0.9383, 0.4826, 0.5123]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 23.2499098777771 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 90390, 90390, 90392]), + col_indices=tensor([ 5326, 106, 329, ..., 882, 2232, 16085]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=90392, layout=torch.sparse_csr) +tensor([0.8729, 0.8085, 0.9821, ..., 0.9383, 0.4826, 0.5123]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_050 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 90392 +Density: 9.180192695100532e-05 +Time: 23.2499098777771 seconds + +[19.25, 18.53, 18.54, 18.52, 19.03, 18.5, 18.62, 18.69, 18.75, 18.91] +[52.93] +27.05930185317993 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 39539, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_050', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 90392, 'MATRIX_DENSITY': 9.180192695100532e-05, 'TIME_S': 23.2499098777771, 'TIME_S_1KI': 0.5880247319805029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1432.2488470888138, 'W': 52.93} +[19.25, 18.53, 18.54, 18.52, 19.03, 18.5, 18.62, 18.69, 18.75, 18.91, 19.02, 22.49, 19.18, 18.46, 19.16, 18.74, 18.68, 18.81, 18.94, 18.68] +341.57 +17.0785 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 39539, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_050', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 90392, 'MATRIX_DENSITY': 9.180192695100532e-05, 'TIME_S': 23.2499098777771, 'TIME_S_1KI': 0.5880247319805029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1432.2488470888138, 'W': 52.93, 'J_1KI': 36.223699311788714, 'W_1KI': 1.3386782670274917, 'W_D': 35.8515, 'J_D': 970.1165603892804, 'W_D_1KI': 0.9067376514327626, 'J_D_1KI': 0.022932741127311328} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_055.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_055.json new file mode 100644 index 0000000..284f6de --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_055.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 38345, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 22.721609354019165, "TIME_S_1KI": 0.5925572917986482, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1295.1426379776, "W": 52.99, "J_1KI": 33.77605001897509, "W_1KI": 1.3819272395357936, "W_D": 35.99925, "J_D": 
879.8672128744126, "W_D_1KI": 0.9388251401747295, "J_D_1KI": 0.02448363907092788} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_055.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_055.output new file mode 100644 index 0000000..7d0ab7a --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_055.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_055.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 0.5476551055908203} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.6500, 0.0899, 0.0927, ..., 0.8780, 0.1384, 0.5202]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 0.5476551055908203 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '38345', '-m', 'matrices/as-caida_pruned/as-caida_G_055.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_055", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 91476, "MATRIX_DENSITY": 9.290283509348351e-05, "TIME_S": 22.721609354019165} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.8924, 0.6812, 0.2121, ..., 0.9590, 0.1950, 0.8497]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 22.721609354019165 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 91475, 91475, 91476]), + col_indices=tensor([21783, 106, 329, ..., 160, 882, 17255]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=91476, layout=torch.sparse_csr) +tensor([0.8924, 0.6812, 0.2121, ..., 0.9590, 0.1950, 0.8497]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_055 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 91476 +Density: 9.290283509348351e-05 +Time: 22.721609354019165 seconds + +[19.13, 18.89, 18.41, 18.46, 19.09, 18.53, 18.69, 18.39, 18.75, 18.79] +[52.99] +24.441265106201172 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 38345, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_055', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 91476, 'MATRIX_DENSITY': 9.290283509348351e-05, 'TIME_S': 22.721609354019165, 'TIME_S_1KI': 0.5925572917986482, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.1426379776, 'W': 52.99} +[19.13, 18.89, 18.41, 18.46, 19.09, 18.53, 18.69, 18.39, 18.75, 18.79, 18.99, 18.52, 18.53, 18.46, 19.15, 19.44, 21.15, 18.72, 18.95, 18.46] +339.81500000000005 +16.990750000000002 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 38345, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_055', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 91476, 'MATRIX_DENSITY': 9.290283509348351e-05, 'TIME_S': 22.721609354019165, 'TIME_S_1KI': 0.5925572917986482, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.1426379776, 'W': 52.99, 'J_1KI': 33.77605001897509, 'W_1KI': 1.3819272395357936, 'W_D': 35.99925, 'J_D': 879.8672128744126, 'W_D_1KI': 0.9388251401747295, 'J_D_1KI': 0.02448363907092788} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_060.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_060.json new file mode 100644 index 0000000..ce6a9ec --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_060.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 38744, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 21.067097187042236, "TIME_S_1KI": 0.5437512179187032, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1312.662742536068, "W": 53.03, "J_1KI": 33.88041354883512, "W_1KI": 1.368728061119141, "W_D": 36.08125, "J_D": 893.1267693594098, "W_D_1KI": 0.9312732294032624, "J_D_1KI": 0.0240365793259153} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_060.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_060.output new file mode 100644 index 0000000..ae9e305 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_060.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_060.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 
9.564901186217454e-05, "TIME_S": 0.6113817691802979} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.0072, 0.3076, 0.9240, ..., 0.5844, 0.7748, 0.1447]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 0.6113817691802979 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '34348', '-m', 'matrices/as-caida_pruned/as-caida_G_060.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 18.61709499359131} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.0158, 0.5063, 0.2913, ..., 0.3458, 0.1049, 0.7617]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 18.61709499359131 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '38744', '-m', 'matrices/as-caida_pruned/as-caida_G_060.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_060", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 94180, "MATRIX_DENSITY": 9.564901186217454e-05, "TIME_S": 21.067097187042236} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.7269, 0.9053, 0.3912, ..., 0.7899, 0.9932, 0.4611]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 21.067097187042236 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 94180, 94180, 94180]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=94180, layout=torch.sparse_csr) +tensor([0.7269, 0.9053, 0.3912, ..., 0.7899, 0.9932, 0.4611]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_060 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 94180 +Density: 9.564901186217454e-05 +Time: 21.067097187042236 seconds + +[19.36, 18.36, 22.56, 19.14, 18.42, 18.75, 18.44, 18.6, 18.8, 18.42] +[53.03] +24.753210306167603 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 38744, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_060', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 94180, 'MATRIX_DENSITY': 9.564901186217454e-05, 'TIME_S': 21.067097187042236, 'TIME_S_1KI': 0.5437512179187032, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1312.662742536068, 'W': 53.03} +[19.36, 18.36, 22.56, 19.14, 18.42, 18.75, 18.44, 18.6, 18.8, 18.42, 19.02, 18.41, 18.64, 18.47, 18.39, 18.49, 18.67, 18.48, 18.58, 18.75] +338.975 +16.94875 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 38744, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_060', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 94180, 'MATRIX_DENSITY': 9.564901186217454e-05, 'TIME_S': 21.067097187042236, 'TIME_S_1KI': 0.5437512179187032, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1312.662742536068, 'W': 53.03, 'J_1KI': 33.88041354883512, 'W_1KI': 1.368728061119141, 'W_D': 36.08125, 'J_D': 893.1267693594098, 'W_D_1KI': 0.9312732294032624, 'J_D_1KI': 0.0240365793259153} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_065.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_065.json new file mode 100644 index 0000000..25ddd58 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_065.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 37367, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 20.45805835723877, "TIME_S_1KI": 0.547489987348162, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1288.2858899974822, "W": 52.94, "J_1KI": 34.476567291928234, "W_1KI": 1.4167581020686701, "W_D": 35.897, "J_D": 
873.5473855919838, "W_D_1KI": 0.9606604758209114, "J_D_1KI": 0.025708793208470346} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_065.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_065.output new file mode 100644 index 0000000..c3e0049 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_065.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_065.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 0.5619921684265137} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.6305, 0.9162, 0.2890, ..., 0.7248, 0.6673, 0.7676]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 0.5619921684265137 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '37367', '-m', 'matrices/as-caida_pruned/as-caida_G_065.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_065", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 95068, "MATRIX_DENSITY": 9.655086281283934e-05, "TIME_S": 20.45805835723877} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.7118, 0.4999, 0.1937, ..., 0.3055, 0.3081, 0.7961]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 20.45805835723877 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 95068, 95068, 95068]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 882]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=95068, layout=torch.sparse_csr) +tensor([0.7118, 0.4999, 0.1937, ..., 0.3055, 0.3081, 0.7961]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_065 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 95068 +Density: 9.655086281283934e-05 +Time: 20.45805835723877 seconds + +[19.13, 18.61, 18.7, 22.55, 19.05, 18.5, 19.32, 18.34, 18.74, 18.51] +[52.94] +24.334829807281494 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 37367, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_065', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 95068, 'MATRIX_DENSITY': 9.655086281283934e-05, 'TIME_S': 20.45805835723877, 'TIME_S_1KI': 0.547489987348162, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1288.2858899974822, 'W': 52.94} +[19.13, 18.61, 18.7, 22.55, 19.05, 18.5, 19.32, 18.34, 18.74, 18.51, 19.32, 18.72, 18.51, 18.49, 18.65, 18.34, 18.82, 18.48, 18.92, 19.28] +340.85999999999996 +17.043 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 37367, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_065', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 95068, 'MATRIX_DENSITY': 9.655086281283934e-05, 'TIME_S': 20.45805835723877, 'TIME_S_1KI': 0.547489987348162, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1288.2858899974822, 'W': 52.94, 'J_1KI': 34.476567291928234, 'W_1KI': 1.4167581020686701, 'W_D': 35.897, 'J_D': 873.5473855919838, 'W_D_1KI': 0.9606604758209114, 'J_D_1KI': 0.025708793208470346} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_070.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_070.json new file mode 100644 index 0000000..06fb396 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_070.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 41452, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 20.99607229232788, "TIME_S_1KI": 0.5065153018510055, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1448.8844534826278, "W": 53.19, "J_1KI": 34.95330631773202, "W_1KI": 1.2831708964585544, "W_D": 27.894, "J_D": 759.8267145223617, "W_D_1KI": 0.6729228987744861, "J_D_1KI": 0.01623378603624641} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_070.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_070.output new file mode 100644 index 0000000..862702f --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_070.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_070.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 
7.991130653390679e-05, "TIME_S": 0.5792512893676758} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.9266, 0.9879, 0.4684, ..., 0.7679, 0.2724, 0.5187]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 0.5792512893676758 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '36253', '-m', 'matrices/as-caida_pruned/as-caida_G_070.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 18.365938186645508} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.7258, 0.0804, 0.3631, ..., 0.8561, 0.6576, 0.0795]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 18.365938186645508 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '41452', '-m', 'matrices/as-caida_pruned/as-caida_G_070.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_070", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 78684, "MATRIX_DENSITY": 7.991130653390679e-05, "TIME_S": 20.99607229232788} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.7295, 0.0617, 0.5775, ..., 0.6877, 0.3399, 0.2786]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 20.99607229232788 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 78684, 78684, 78684]), + col_indices=tensor([ 106, 329, 1040, ..., 16263, 2242, 2242]), + values=tensor([1., 1., 1., ..., 3., 1., 1.]), size=(31379, 31379), + nnz=78684, layout=torch.sparse_csr) +tensor([0.7295, 0.0617, 0.5775, ..., 0.6877, 0.3399, 0.2786]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_070 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 78684 +Density: 7.991130653390679e-05 +Time: 20.99607229232788 seconds + +[55.88, 54.5, 54.75, 53.63, 41.8, 28.28, 18.71, 19.18, 28.86, 19.23] +[53.19] +27.239790439605713 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 41452, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_070', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 78684, 'MATRIX_DENSITY': 7.991130653390679e-05, 'TIME_S': 20.99607229232788, 'TIME_S_1KI': 0.5065153018510055, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1448.8844534826278, 'W': 53.19} +[55.88, 54.5, 54.75, 53.63, 41.8, 28.28, 18.71, 19.18, 28.86, 19.23, 18.92, 18.49, 19.19, 18.65, 18.72, 18.62, 18.99, 18.47, 18.72, 18.69] +505.91999999999996 +25.296 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 41452, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_070', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 78684, 'MATRIX_DENSITY': 7.991130653390679e-05, 'TIME_S': 20.99607229232788, 'TIME_S_1KI': 0.5065153018510055, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1448.8844534826278, 'W': 53.19, 'J_1KI': 34.95330631773202, 'W_1KI': 1.2831708964585544, 'W_D': 27.894, 'J_D': 759.8267145223617, 'W_D_1KI': 0.6729228987744861, 'J_D_1KI': 0.01623378603624641} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_075.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_075.json new file mode 100644 index 0000000..b82e97a --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_075.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 36855, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 20.419607877731323, "TIME_S_1KI": 0.5540525811350244, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1290.3399892759323, "W": 52.980000000000004, "J_1KI": 35.01126005361368, "W_1KI": 1.4375254375254376, "W_D": 
36.07575, "J_D": 878.633123218596, "W_D_1KI": 0.9788563288563289, "J_D_1KI": 0.02655966161596334} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_075.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_075.output new file mode 100644 index 0000000..452c2b4 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_075.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_075.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 0.5697860717773438} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.6308, 0.5906, 0.2335, ..., 0.1400, 0.7305, 0.2890]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 0.5697860717773438 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '36855', '-m', 'matrices/as-caida_pruned/as-caida_G_075.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_075", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 97492, "MATRIX_DENSITY": 9.901267216465406e-05, "TIME_S": 20.419607877731323} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.0237, 0.9229, 0.3400, ..., 0.5947, 0.3448, 0.5028]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 20.419607877731323 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 97491, 97491, 97492]), + col_indices=tensor([22754, 22754, 106, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=97492, layout=torch.sparse_csr) +tensor([0.0237, 0.9229, 0.3400, ..., 0.5947, 0.3448, 0.5028]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_075 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 97492 +Density: 9.901267216465406e-05 +Time: 20.419607877731323 seconds + +[19.27, 19.19, 18.72, 18.58, 18.78, 18.56, 18.57, 18.56, 18.92, 18.61] +[52.98] +24.355228185653687 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36855, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_075', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 97492, 'MATRIX_DENSITY': 9.901267216465406e-05, 'TIME_S': 20.419607877731323, 'TIME_S_1KI': 0.5540525811350244, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1290.3399892759323, 'W': 52.980000000000004} +[19.27, 19.19, 18.72, 18.58, 18.78, 18.56, 18.57, 18.56, 18.92, 18.61, 19.16, 18.87, 19.03, 18.64, 18.73, 18.56, 18.79, 18.96, 18.79, 18.63] +338.08500000000004 +16.90425 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36855, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_075', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 97492, 'MATRIX_DENSITY': 9.901267216465406e-05, 'TIME_S': 20.419607877731323, 'TIME_S_1KI': 0.5540525811350244, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1290.3399892759323, 'W': 52.980000000000004, 'J_1KI': 35.01126005361368, 'W_1KI': 1.4375254375254376, 'W_D': 36.07575, 'J_D': 878.633123218596, 'W_D_1KI': 0.9788563288563289, 'J_D_1KI': 0.02655966161596334} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_080.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_080.json new file mode 100644 index 0000000..6288462 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_080.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 36802, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 20.515421152114868, "TIME_S_1KI": 0.5574539740262722, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1296.6938467216492, "W": 53.019999999999996, "J_1KI": 35.23433092553799, "W_1KI": 1.4406825715993694, "W_D": 35.983, "J_D": 880.0251732664108, "W_D_1KI": 0.9777457746861582, "J_D_1KI": 0.02656773476132162} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_080.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_080.output new file mode 100644 index 0000000..ae2c730 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_080.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_080.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": 
[31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 0.5706076622009277} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.5639, 0.5916, 0.5281, ..., 0.1252, 0.2839, 0.3672]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 0.5706076622009277 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '36802', '-m', 'matrices/as-caida_pruned/as-caida_G_080.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_080", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 98112, "MATRIX_DENSITY": 9.964234287345156e-05, "TIME_S": 20.515421152114868} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.4580, 0.7792, 0.4296, ..., 0.5123, 0.6482, 0.6837]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 20.515421152114868 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 98111, 98111, 98112]), + col_indices=tensor([22754, 22754, 106, ..., 4133, 31329, 12170]), + values=tensor([1., 1., 1., ..., 3., 3., 1.]), size=(31379, 31379), + nnz=98112, layout=torch.sparse_csr) +tensor([0.4580, 0.7792, 0.4296, ..., 0.5123, 0.6482, 0.6837]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_080 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 98112 +Density: 9.964234287345156e-05 +Time: 20.515421152114868 seconds + +[19.31, 20.53, 20.84, 18.81, 18.98, 18.55, 18.7, 18.67, 18.85, 18.56] +[53.02] +24.456692695617676 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36802, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_080', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 98112, 'MATRIX_DENSITY': 9.964234287345156e-05, 'TIME_S': 20.515421152114868, 'TIME_S_1KI': 0.5574539740262722, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.6938467216492, 'W': 53.019999999999996} +[19.31, 20.53, 20.84, 18.81, 18.98, 18.55, 18.7, 18.67, 18.85, 18.56, 19.07, 18.59, 18.87, 18.72, 18.64, 18.42, 18.82, 18.45, 18.55, 18.56] +340.74 +17.037 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36802, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_080', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 98112, 'MATRIX_DENSITY': 9.964234287345156e-05, 'TIME_S': 20.515421152114868, 'TIME_S_1KI': 0.5574539740262722, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.6938467216492, 'W': 53.019999999999996, 'J_1KI': 35.23433092553799, 'W_1KI': 1.4406825715993694, 'W_D': 35.983, 'J_D': 880.0251732664108, 'W_D_1KI': 0.9777457746861582, 'J_D_1KI': 0.02656773476132162} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_085.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_085.json new file mode 100644 index 0000000..ce131f7 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_085.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 36441, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 20.454805850982666, "TIME_S_1KI": 0.5613129675635319, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1289.9737722873688, "W": 53.0, "J_1KI": 35.398967434685346, "W_1KI": 1.454405751763124, "W_D": 35.8855, "J_D": 873.4217699135542, "W_D_1KI": 0.984756181224445, "J_D_1KI": 0.02702330290673815} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_085.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_085.output new file mode 100644 index 0000000..5329a64 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_085.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_085.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, 
"MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 0.5762627124786377} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.6014, 0.3275, 0.9206, ..., 0.3129, 0.8080, 0.2066]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 0.5762627124786377 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '36441', '-m', 'matrices/as-caida_pruned/as-caida_G_085.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_085", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 99166, "MATRIX_DENSITY": 0.0001007127830784073, "TIME_S": 20.454805850982666} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.8492, 0.9309, 0.9363, ..., 0.6404, 0.0656, 0.1254]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 20.454805850982666 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99165, 99165, 99166]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=99166, layout=torch.sparse_csr) +tensor([0.8492, 0.9309, 0.9363, ..., 0.6404, 0.0656, 0.1254]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_085 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 99166 +Density: 0.0001007127830784073 +Time: 20.454805850982666 seconds + +[19.08, 18.87, 22.36, 19.31, 19.03, 19.22, 18.67, 18.69, 18.79, 18.83] +[53.0] +24.339127779006958 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36441, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_085', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 99166, 'MATRIX_DENSITY': 0.0001007127830784073, 'TIME_S': 20.454805850982666, 'TIME_S_1KI': 0.5613129675635319, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.9737722873688, 'W': 53.0} +[19.08, 18.87, 22.36, 19.31, 19.03, 19.22, 18.67, 18.69, 18.79, 18.83, 19.13, 18.76, 18.63, 18.35, 18.41, 18.98, 18.94, 18.54, 18.79, 18.86] +342.29 +17.1145 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 36441, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_085', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 99166, 'MATRIX_DENSITY': 0.0001007127830784073, 'TIME_S': 20.454805850982666, 'TIME_S_1KI': 0.5613129675635319, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.9737722873688, 'W': 53.0, 'J_1KI': 35.398967434685346, 'W_1KI': 1.454405751763124, 'W_D': 35.8855, 'J_D': 873.4217699135542, 'W_D_1KI': 0.984756181224445, 'J_D_1KI': 0.02702330290673815} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_090.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_090.json new file mode 100644 index 0000000..644c751 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_090.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 35784, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 20.52307367324829, "TIME_S_1KI": 0.5735265390467329, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1278.1363908004762, "W": 52.96000000000001, "J_1KI": 35.71809721664644, "W_1KI": 1.4799910574558464, "W_D": 36.09575000000001, "J_D": 871.1346606540086, "W_D_1KI": 1.0087119941873466, "J_D_1KI": 0.028188911082812053} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_090.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_090.output new file mode 100644 index 0000000..3c95aaa --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_090.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_090.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, 
"MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 0.5868432521820068} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.4076, 0.6334, 0.9862, ..., 0.8059, 0.3640, 0.0188]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 0.5868432521820068 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '35784', '-m', 'matrices/as-caida_pruned/as-caida_G_090.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_090", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 100924, "MATRIX_DENSITY": 0.00010249820421722343, "TIME_S": 20.52307367324829} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.1592, 0.2457, 0.4794, ..., 0.5220, 0.1742, 0.1461]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 20.52307367324829 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100923, 100923, + 100924]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 31211, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=100924, layout=torch.sparse_csr) +tensor([0.1592, 0.2457, 0.4794, ..., 0.5220, 0.1742, 0.1461]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_090 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 100924 +Density: 0.00010249820421722343 +Time: 20.52307367324829 seconds + +[18.85, 19.01, 18.57, 18.65, 18.92, 18.39, 18.77, 18.58, 18.48, 18.72] +[52.96] +24.133995294570923 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35784, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_090', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 100924, 'MATRIX_DENSITY': 0.00010249820421722343, 'TIME_S': 20.52307367324829, 'TIME_S_1KI': 0.5735265390467329, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1278.1363908004762, 'W': 52.96000000000001} +[18.85, 19.01, 18.57, 18.65, 18.92, 18.39, 18.77, 18.58, 18.48, 18.72, 19.27, 18.5, 18.71, 18.81, 18.66, 19.07, 18.66, 18.94, 18.85, 18.59] +337.28499999999997 +16.86425 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35784, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_090', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 100924, 'MATRIX_DENSITY': 0.00010249820421722343, 'TIME_S': 20.52307367324829, 'TIME_S_1KI': 0.5735265390467329, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1278.1363908004762, 'W': 52.96000000000001, 'J_1KI': 35.71809721664644, 'W_1KI': 1.4799910574558464, 'W_D': 36.09575000000001, 'J_D': 871.1346606540086, 'W_D_1KI': 1.0087119941873466, 'J_D_1KI': 0.028188911082812053} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_095.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_095.json new file mode 100644 index 0000000..be0aa52 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_095.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 35490, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 20.46039319038391, "TIME_S_1KI": 0.5765115015605498, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1282.0347919940948, "W": 53.08, "J_1KI": 36.12383183978852, "W_1KI": 1.4956325725556494, "W_D": 35.9375, "J_D": 867.9940719157457, "W_D_1KI": 1.0126091856861086, "J_D_1KI": 0.028532239664302864} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_095.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_095.output new file mode 100644 index 0000000..d686e85 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_095.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_095.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, 
"MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 0.5917131900787354} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.7218, 0.5046, 0.5040, ..., 0.4488, 0.1318, 0.7895]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 0.5917131900787354 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '35490', '-m', 'matrices/as-caida_pruned/as-caida_G_095.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_095", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102290, "MATRIX_DENSITY": 0.00010388551097241275, "TIME_S": 20.46039319038391} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.9907, 0.5433, 0.8254, ..., 0.2702, 0.5909, 0.3363]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 20.46039319038391 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102289, 102289, + 102290]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 25970, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102290, layout=torch.sparse_csr) +tensor([0.9907, 0.5433, 0.8254, ..., 0.2702, 0.5909, 0.3363]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_095 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102290 +Density: 0.00010388551097241275 +Time: 20.46039319038391 seconds + +[19.37, 19.07, 18.88, 18.75, 18.71, 18.77, 18.75, 18.54, 23.16, 18.94] +[53.08] +24.152878522872925 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35490, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_095', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102290, 'MATRIX_DENSITY': 0.00010388551097241275, 'TIME_S': 20.46039319038391, 'TIME_S_1KI': 0.5765115015605498, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.0347919940948, 'W': 53.08} +[19.37, 19.07, 18.88, 18.75, 18.71, 18.77, 18.75, 18.54, 23.16, 18.94, 19.49, 18.66, 18.86, 18.5, 18.87, 18.71, 18.93, 18.58, 18.99, 18.44] +342.84999999999997 +17.1425 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35490, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_095', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102290, 'MATRIX_DENSITY': 0.00010388551097241275, 'TIME_S': 20.46039319038391, 'TIME_S_1KI': 0.5765115015605498, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.0347919940948, 'W': 53.08, 'J_1KI': 36.12383183978852, 'W_1KI': 1.4956325725556494, 'W_D': 35.9375, 'J_D': 867.9940719157457, 'W_D_1KI': 1.0126091856861086, 'J_D_1KI': 0.028532239664302864} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_100.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_100.json new file mode 100644 index 0000000..644c009 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_100.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 35610, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 20.70893144607544, "TIME_S_1KI": 0.5815482012377264, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1289.4505507302283, "W": 53.10999999999999, "J_1KI": 36.210349641399276, "W_1KI": 1.4914349901713, "W_D": 36.02224999999999, "J_D": 874.5793654875157, "W_D_1KI": 1.0115768042684636, "J_D_1KI": 0.02840709924932501} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_100.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_100.output new file mode 100644 index 0000000..fe8f8d3 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_100.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_100.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 
102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 0.5897078514099121} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.6924, 0.7884, 0.0141, ..., 0.6234, 0.8060, 0.0777]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 0.5897078514099121 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '35610', '-m', 'matrices/as-caida_pruned/as-caida_G_100.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_100", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 102888, "MATRIX_DENSITY": 0.00010449283852702711, "TIME_S": 20.70893144607544} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.7456, 0.2952, 0.3583, ..., 0.0846, 0.5164, 0.8564]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 20.70893144607544 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 102886, 102887, + 102888]), + col_indices=tensor([ 106, 329, 1040, ..., 25970, 5128, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=102888, layout=torch.sparse_csr) +tensor([0.7456, 0.2952, 0.3583, ..., 0.0846, 0.5164, 0.8564]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_100 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 102888 +Density: 0.00010449283852702711 +Time: 20.70893144607544 seconds + +[19.14, 18.91, 18.67, 18.55, 22.58, 18.92, 18.55, 18.88, 18.67, 18.66] +[53.11] +24.278865575790405 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35610, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_100', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102888, 'MATRIX_DENSITY': 0.00010449283852702711, 'TIME_S': 20.70893144607544, 'TIME_S_1KI': 0.5815482012377264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.4505507302283, 'W': 53.10999999999999} +[19.14, 18.91, 18.67, 18.55, 22.58, 18.92, 18.55, 18.88, 18.67, 18.66, 19.26, 18.96, 18.77, 18.67, 18.67, 19.16, 18.71, 18.52, 18.71, 18.65] +341.755 +17.08775 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35610, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_100', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 102888, 'MATRIX_DENSITY': 0.00010449283852702711, 'TIME_S': 20.70893144607544, 'TIME_S_1KI': 0.5815482012377264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.4505507302283, 'W': 53.10999999999999, 'J_1KI': 36.210349641399276, 'W_1KI': 1.4914349901713, 'W_D': 36.02224999999999, 'J_D': 874.5793654875157, 'W_D_1KI': 1.0115768042684636, 'J_D_1KI': 0.02840709924932501} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_105.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_105.json new file mode 100644 index 0000000..1b9fb1b --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_105.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 35136, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 20.597471714019775, "TIME_S_1KI": 0.5862213033361731, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1291.1984325551987, "W": 53.02, "J_1KI": 36.74858926898903, "W_1KI": 1.5089936247723135, "W_D": 35.87925, "J_D": 873.7689807856678, "W_D_1KI": 1.0211535177595628, "J_D_1KI": 0.02906288472676351} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_105.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_105.output new file mode 100644 index 0000000..4b13cd8 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_105.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_105.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, 
"MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 0.5976669788360596} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.8290, 0.0647, 0.9027, ..., 0.9943, 0.1762, 0.7159]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 0.5976669788360596 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '35136', '-m', 'matrices/as-caida_pruned/as-caida_G_105.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_105", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104726, "MATRIX_DENSITY": 0.00010635950749923647, "TIME_S": 20.597471714019775} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.7842, 0.0579, 0.1525, ..., 0.2518, 0.1124, 0.7231]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 20.597471714019775 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104725, 104725, + 104726]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104726, layout=torch.sparse_csr) +tensor([0.7842, 0.0579, 0.1525, ..., 0.2518, 0.1124, 0.7231]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_105 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104726 +Density: 0.00010635950749923647 +Time: 20.597471714019775 seconds + +[22.03, 18.53, 18.95, 18.83, 18.77, 18.5, 18.89, 21.99, 19.17, 18.59] +[53.02] +24.353044748306274 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35136, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_105', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104726, 'MATRIX_DENSITY': 0.00010635950749923647, 'TIME_S': 20.597471714019775, 'TIME_S_1KI': 0.5862213033361731, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1291.1984325551987, 'W': 53.02} +[22.03, 18.53, 18.95, 18.83, 18.77, 18.5, 18.89, 21.99, 19.17, 18.59, 19.02, 18.5, 18.86, 18.56, 18.96, 18.64, 19.16, 18.77, 18.66, 18.51] +342.815 +17.14075 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35136, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_105', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104726, 'MATRIX_DENSITY': 0.00010635950749923647, 'TIME_S': 20.597471714019775, 'TIME_S_1KI': 0.5862213033361731, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1291.1984325551987, 'W': 53.02, 'J_1KI': 36.74858926898903, 'W_1KI': 1.5089936247723135, 'W_D': 35.87925, 'J_D': 873.7689807856678, 'W_D_1KI': 1.0211535177595628, 'J_D_1KI': 0.02906288472676351} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_110.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_110.json new file mode 100644 index 0000000..7f31706 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_110.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 35075, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 20.378410577774048, "TIME_S_1KI": 0.5809953122672572, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1296.0259760141373, "W": 53.11, "J_1KI": 36.950134740246256, "W_1KI": 1.5141838916607269, "W_D": 36.210499999999996, "J_D": 883.6329995191096, "W_D_1KI": 1.0323734853884532, "J_D_1KI": 0.029433313909863243} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_110.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_110.output new file mode 100644 index 0000000..bacacd1 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_110.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_110.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, 
"MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 0.5987081527709961} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.5411, 0.2711, 0.3094, ..., 0.4107, 0.6671, 0.3201]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 0.5987081527709961 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '35075', '-m', 'matrices/as-caida_pruned/as-caida_G_110.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_110", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 104846, "MATRIX_DENSITY": 0.0001064813792493263, "TIME_S": 20.378410577774048} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.4809, 0.8376, 0.9816, ..., 0.1928, 0.1271, 0.2375]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 20.378410577774048 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 104844, 104844, + 104846]), + col_indices=tensor([ 106, 329, 1040, ..., 882, 2616, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=104846, layout=torch.sparse_csr) +tensor([0.4809, 0.8376, 0.9816, ..., 0.1928, 0.1271, 0.2375]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_110 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 104846 +Density: 0.0001064813792493263 +Time: 20.378410577774048 seconds + +[19.5, 18.39, 19.38, 18.42, 18.84, 18.85, 18.75, 18.49, 18.54, 19.0] +[53.11] +24.40267324447632 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35075, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_110', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104846, 'MATRIX_DENSITY': 0.0001064813792493263, 'TIME_S': 20.378410577774048, 'TIME_S_1KI': 0.5809953122672572, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.0259760141373, 'W': 53.11} +[19.5, 18.39, 19.38, 18.42, 18.84, 18.85, 18.75, 18.49, 18.54, 19.0, 18.92, 18.45, 18.57, 18.32, 18.38, 18.79, 18.36, 20.76, 18.69, 18.6] +337.99 +16.8995 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 35075, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_110', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 104846, 'MATRIX_DENSITY': 0.0001064813792493263, 'TIME_S': 20.378410577774048, 'TIME_S_1KI': 0.5809953122672572, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.0259760141373, 'W': 53.11, 'J_1KI': 36.950134740246256, 'W_1KI': 1.5141838916607269, 'W_D': 36.210499999999996, 'J_D': 883.6329995191096, 'W_D_1KI': 1.0323734853884532, 'J_D_1KI': 0.029433313909863243} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_115.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_115.json new file mode 100644 index 0000000..e5f8250 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_115.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 34900, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 20.62199878692627, "TIME_S_1KI": 0.5908882173904375, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1289.0678378415107, "W": 52.93, "J_1KI": 36.93604119889715, "W_1KI": 1.5166189111747852, "W_D": 35.918499999999995, "J_D": 874.7663543077706, "W_D_1KI": 1.0291833810888251, "J_D_1KI": 0.029489495160138254} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_115.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_115.output new file mode 100644 index 0000000..feb794a --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_115.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_115.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, 
"MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 0.6017181873321533} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.1380, 0.1254, 0.9934, ..., 0.7043, 0.9230, 0.4629]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 0.6017181873321533 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '34900', '-m', 'matrices/as-caida_pruned/as-caida_G_115.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_115", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106312, "MATRIX_DENSITY": 0.00010797024579625715, "TIME_S": 20.62199878692627} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.2198, 0.5515, 0.2549, ..., 0.7323, 0.7765, 0.9258]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 20.62199878692627 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106311, 106311, + 106312]), + col_indices=tensor([ 106, 329, 1040, ..., 160, 882, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106312, layout=torch.sparse_csr) +tensor([0.2198, 0.5515, 0.2549, ..., 0.7323, 0.7765, 0.9258]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_115 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106312 +Density: 0.00010797024579625715 +Time: 20.62199878692627 seconds + +[19.07, 18.55, 18.76, 18.35, 18.57, 18.47, 18.42, 18.52, 18.61, 18.54] +[52.93] +24.35420060157776 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34900, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_115', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106312, 'MATRIX_DENSITY': 0.00010797024579625715, 'TIME_S': 20.62199878692627, 'TIME_S_1KI': 0.5908882173904375, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.0678378415107, 'W': 52.93} +[19.07, 18.55, 18.76, 18.35, 18.57, 18.47, 18.42, 18.52, 18.61, 18.54, 18.97, 18.49, 18.73, 18.79, 22.79, 18.7, 19.02, 19.23, 18.69, 18.5] +340.23 +17.0115 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34900, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_115', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106312, 'MATRIX_DENSITY': 0.00010797024579625715, 'TIME_S': 20.62199878692627, 'TIME_S_1KI': 0.5908882173904375, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.0678378415107, 'W': 52.93, 'J_1KI': 36.93604119889715, 'W_1KI': 1.5166189111747852, 'W_D': 35.918499999999995, 'J_D': 874.7663543077706, 'W_D_1KI': 1.0291833810888251, 'J_D_1KI': 0.029489495160138254} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_120.json b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_120.json new file mode 100644 index 0000000..7c722a9 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_120.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 34674, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, "TIME_S": 20.45119857788086, "TIME_S_1KI": 0.5898136522432041, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1286.023119688034, "W": 53.0, "J_1KI": 37.08897501551693, "W_1KI": 1.528522812481975, "W_D": 36.206, "J_D": 878.5236428570748, "W_D_1KI": 1.044182961296649, "J_D_1KI": 0.03011429201409266} diff --git a/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_120.output b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_120.output new file mode 100644 index 0000000..cb25913 --- /dev/null +++ b/pytorch/output_as-caida/xeon_4216_1_csr_20_10_10_as-caida_G_120.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/as-caida_pruned/as-caida_G_120.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 
0.0001081713341839054, "TIME_S": 0.6056373119354248} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.2481, 0.2147, 0.0211, ..., 0.6298, 0.9573, 0.6782]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 0.6056373119354248 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '34674', '-m', 'matrices/as-caida_pruned/as-caida_G_120.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_120", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 106510, "MATRIX_DENSITY": 0.0001081713341839054, "TIME_S": 20.45119857788086} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.8740, 0.9922, 0.5164, ..., 0.2142, 0.4361, 0.8790]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 20.45119857788086 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 106509, 106509, + 106510]), + col_indices=tensor([ 106, 329, 1040, ..., 155, 160, 12170]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(31379, 31379), + nnz=106510, layout=torch.sparse_csr) +tensor([0.8740, 0.9922, 0.5164, ..., 0.2142, 0.4361, 0.8790]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_120 +Matrix Format: csr +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 106510 +Density: 0.0001081713341839054 +Time: 20.45119857788086 seconds + +[19.22, 18.59, 18.76, 18.5, 18.47, 18.66, 18.69, 18.58, 18.75, 18.62] +[53.0] +24.26458716392517 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34674, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_120', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106510, 'MATRIX_DENSITY': 0.0001081713341839054, 'TIME_S': 20.45119857788086, 'TIME_S_1KI': 0.5898136522432041, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1286.023119688034, 'W': 53.0} +[19.22, 18.59, 18.76, 18.5, 18.47, 18.66, 18.69, 18.58, 18.75, 18.62, 19.39, 18.37, 18.62, 18.41, 18.73, 18.59, 18.88, 18.37, 18.81, 18.97] +335.88 +16.794 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34674, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_120', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 106510, 'MATRIX_DENSITY': 0.0001081713341839054, 'TIME_S': 20.45119857788086, 'TIME_S_1KI': 0.5898136522432041, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1286.023119688034, 'W': 53.0, 'J_1KI': 37.08897501551693, 'W_1KI': 1.528522812481975, 'W_D': 36.206, 'J_D': 878.5236428570748, 'W_D_1KI': 1.044182961296649, 'J_D_1KI': 0.03011429201409266} diff --git a/pytorch/run.py b/pytorch/run.py index d3c8ff5..4ad32f4 100644 --- a/pytorch/run.py +++ b/pytorch/run.py @@ -160,17 +160,19 @@ if args.power: assert(len(baseline_list) == args.baseline_time_s) # Power Collection + start_time = time.time() power_process = subprocess.run( power + ['-1'] + program( args.cpu, args.cores, args.matrix_type, args.format, result[Stat.ITERATIONS.name], args.matrix_file, args.synthetic_size, args.synthetic_density), stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + power_time_s = time.time() - start_time power_process.check_returncode() power_list = [float(x) for x in power_process.stdout.strip().split('\n')] - power_time_s = json.loads(power_process.stderr)[Stat.TIME_S.name] + #power_time_s = json.loads(power_process.stderr)[Stat.TIME_S.name] if args.debug: print(power_list, file=sys.stderr) print(power_time_s, file=sys.stderr) diff --git a/pytorch/spmv.py b/pytorch/spmv.py index f83f5f1..1394cb9 100644 --- a/pytorch/spmv.py +++ b/pytorch/spmv.py @@ -45,16 +45,29 @@ elif args.matrix_type == MatrixType.SYNTHETIC: if args.synthetic_size is None and args.synthetic_density is None: exit("Synthetic matrix parameters not specified!") - nnz = int((args.synthetic_size ** 2) * args.synthetic_density) - row_indices = torch.randint(0, args.synthetic_size, (nnz,)) - col_indices = torch.randint(0, args.synthetic_size, (nnz,)) - indices = torch.stack([row_indices, col_indices]) - values = torch.randn(nnz) - + matrix = scipy.sparse.random( + args.synthetic_size, args.synthetic_size, + density=args.synthetic_density, + format='coo', dtype=np.float32, + random_state=np.random.default_rng()) + indices = torch.tensor(np.vstack([matrix.row, 
matrix.col]),
+        dtype=torch.float32, device=device)
+    values = torch.tensor(matrix.data,
+        dtype=torch.float32, device=device)
     matrix = torch.sparse_coo_tensor(
-        indices, values,
-        size=(args.synthetic_size, args.synthetic_size),
-        device=device, dtype=torch.float32)
+        indices, values, size=matrix.shape,
+        dtype=torch.float32, device=device)
+
+#    nnz = int((args.synthetic_size ** 2) * args.synthetic_density)
+#    row_indices = torch.randint(0, args.synthetic_size, (nnz,))
+#    col_indices = torch.randint(0, args.synthetic_size, (nnz,))
+#    indices = torch.stack([row_indices, col_indices])
+#    values = torch.randn(nnz)
+#
+#    matrix = torch.sparse_coo_tensor(
+#        indices, values,
+#        size=(args.synthetic_size, args.synthetic_size),
+#        device=device, dtype=torch.float32)
 else:
     exit("Unrecognized matrix type!")
 
@@ -72,7 +85,8 @@ print(vector, file=sys.stderr)
 
 start = time.time()
 for i in range(0, args.iterations):
-    torch.mv(matrix, vector)
+    torch.mm(matrix, vector.unsqueeze(-1))
+    #torch.mv(matrix, vector)
     #torch.sparse.mm(matrix, vector.unsqueeze(-1)).squeeze(-1)
     #print(i)
 end = time.time()
@@ -85,11 +99,9 @@ print(f"Matrix Type: {result[Stat.MATRIX_TYPE.name]}", file=sys.stderr)
 if args.matrix_type == MatrixType.SUITESPARSE:
     result[Stat.MATRIX_FILE.name] = os.path.splitext(os.path.basename(args.matrix_file))[0]
     print(f"Matrix: {result[Stat.MATRIX_FILE.name]}", file=sys.stderr)
-elif args.matrix_type == MatrixType.SYNTHETIC:
-    result[Stat.MATRIX_DENSITY_GROUP.name] = args.synthetic_density
 
 result[Stat.MATRIX_FORMAT.name] = args.format.value
-print(f"Matrix: {result[Stat.MATRIX_FORMAT.name]}", file=sys.stderr)
+print(f"Matrix Format: {result[Stat.MATRIX_FORMAT.name]}", file=sys.stderr)
 
 result[Stat.MATRIX_SHAPE.name] = matrix.shape
 print(f"Shape: {result[Stat.MATRIX_SHAPE.name]}", file=sys.stderr)
@@ -100,10 +112,17 @@ print(f"Rows: {result[Stat.MATRIX_ROWS.name]}", file=sys.stderr)
 result[Stat.MATRIX_SIZE.name] = matrix.shape[0] * matrix.shape[1]
 print(f"Size: {result[Stat.MATRIX_SIZE.name]}", file=sys.stderr)
 
-result[Stat.MATRIX_NNZ.name] = matrix.values().shape[0]
+if args.format == Format.CSR:
+    rows = matrix.values().shape[0]
+elif args.format == Format.COO:
+    rows = matrix.coalesce().values().shape[0]
+else:
+    exit("Unrecognized format!")
+
+result[Stat.MATRIX_NNZ.name] = rows
 print(f"NNZ: {result[Stat.MATRIX_NNZ.name]}", file=sys.stderr)
 
-result[Stat.MATRIX_DENSITY.name] = matrix.values().shape[0] / (matrix.shape[0] * matrix.shape[1])
+result[Stat.MATRIX_DENSITY.name] = rows / result[Stat.MATRIX_SIZE.name]
 print(f"Density: {result[Stat.MATRIX_DENSITY.name]}", file=sys.stderr)
 
 result[Stat.TIME_S.name] = end - start
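
A minimal, self-contained sketch of the synthetic-matrix path that the patched spmv.py takes: draw a random COO matrix with scipy.sparse.random, wrap it in torch.sparse_coo_tensor, convert to CSR, and time repeated torch.mm products against a dense vector. The size, density, and iteration count below are placeholders, and the index tensor is built as torch.int64 (the conventional dtype for sparse indices) rather than the float32 the patch passes; treat this as an illustration, not the repository's exact code.

# Hypothetical, self-contained sketch of the synthetic SpMV path in the patched
# spmv.py; size, density, and iteration count are illustrative placeholders.
import time

import numpy as np
import scipy.sparse
import torch

size, density, iterations = 10_000, 1e-4, 1000
device = torch.device('cpu')

# Random COO matrix, as generated by the patch via scipy.sparse.random.
coo = scipy.sparse.random(size, size, density=density, format='coo',
                          dtype=np.float32,
                          random_state=np.random.default_rng())

# Sparse indices are conventionally int64 in PyTorch (assumption: the patch's
# dtype=torch.float32 on the index tensor is an oversight).
indices = torch.tensor(np.vstack([coo.row, coo.col]), dtype=torch.int64,
                       device=device)
values = torch.tensor(coo.data, dtype=torch.float32, device=device)
matrix = torch.sparse_coo_tensor(indices, values, size=coo.shape,
                                 dtype=torch.float32, device=device)

# CSR conversion and a dense right-hand side, mirroring the .output logs.
matrix = matrix.to_sparse_csr()
vector = torch.rand(size, dtype=torch.float32, device=device)

# Timed loop: mm against an (n, 1) view instead of mv, as in the patch.
start = time.time()
for _ in range(iterations):
    torch.mm(matrix, vector.unsqueeze(-1))
elapsed = time.time() - start

print(f"NNZ: {matrix.values().shape[0]}")
print(f"Time: {elapsed} seconds")

This timed product is what the reported TIME_S corresponds to; the power sampling, baseline collection, and JSON assembly around it live in run.py.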
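
The JSON records added above also carry derived power fields. Reading them back from the xeon_4216_1_csr_20_10_10_as-caida_G_100 sample, J, W_D, J_D, and most *_1KI values are consistent with the relations sketched below; this is an inference from the numbers, not the project's analysis code, and the idle-power average (17.08775 W for that run) is taken as a given input rather than recomputed from the 20 idle samples in the .output log.

# Hypothetical reconstruction of the derived fields in the output_as-caida JSON
# records, checked against xeon_4216_1_csr_20_10_10_as-caida_G_100.
def derived_metrics(iterations, time_s, watts, power_time_s, idle_watts):
    per_1k = iterations / 1000           # scale for the *_1KI fields
    joules = watts * power_time_s        # 'J': energy over the measured run
    watts_d = watts - idle_watts         # 'W_D': power above the idle baseline
    return {
        'TIME_S_1KI': time_s / per_1k,   # 20.70893 / 35.610 -> 0.58155
        'J': joules,                     # 53.11 * 24.27887  -> 1289.45
        'J_1KI': joules / per_1k,
        'W_1KI': watts / per_1k,
        'W_D': watts_d,                  # 53.11 - 17.08775  -> 36.02225
        'J_D': watts_d * power_time_s,   # -> 874.58
        'W_D_1KI': watts_d / per_1k,
    }

print(derived_metrics(iterations=35610, time_s=20.70893144607544,
                      watts=53.11, power_time_s=24.278865575790405,
                      idle_watts=17.08775))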