diff --git a/pytorch/batch.py b/pytorch/batch.py
index f391f3c..0581479 100644
--- a/pytorch/batch.py
+++ b/pytorch/batch.py
@@ -117,7 +117,7 @@ elif args.matrix_type == MatrixType.SYNTHETIC:
     parameter_list = enumerate([(size, density)
                                 for size in args.synthetic_size
                                 for density in args.synthetic_density
-                                if size ** 2 * density < 10000000])
+                                if size ** 2 * density <= 10000000])
 
 #for i, matrix in enumerate(glob.glob(f'{args.matrix_dir.rstrip("/")}/*.mtx')):
 for i, parameter in parameter_list:
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_amazon0312.json
new file mode 100644
index 0000000..bf1ce83
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_amazon0312.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1345, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.307875871658325, "TIME_S_1KI": 7.663848231716227, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 549.730076084137, "W": 38.131256134653135, "J_1KI": 408.7212461592096, "W_1KI": 28.350376308292294, "W_D": 16.28825613465314, "J_D": 234.82426732969293, "W_D_1KI": 12.110227609407538, "J_D_1KI": 9.003886698444267}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_amazon0312.output
new file mode 100644
index 0000000..7077482
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_amazon0312.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/amazon0312.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 7.806152820587158}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+                            3200438, 3200440]),
+       col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+                           400707]),
+       values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+       nnz=3200440, layout=torch.sparse_csr)
+tensor([0.5486, 0.8485, 0.8195, ..., 0.3778, 0.3275, 0.7623])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 7.806152820587158 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1345 -m matrices/389000+_cols/amazon0312.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.307875871658325}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+                            3200438, 3200440]),
+       col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+                           400707]),
+       values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+       nnz=3200440, layout=torch.sparse_csr)
+tensor([0.6343, 0.9450, 0.3421, ..., 0.5967, 0.9759, 0.2168])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 10.307875871658325 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+                            3200438, 3200440]),
+       col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+                           400707]),
+       values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+       nnz=3200440, layout=torch.sparse_csr)
+tensor([0.6343, 0.9450, 0.3421, ..., 0.5967, 0.9759, 0.2168])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 10.307875871658325 seconds
+
+[24.12, 24.04, 24.4, 24.52, 24.56, 24.68, 24.68, 24.64, 24.56, 24.28]
+[24.24, 24.0, 24.0, 24.8, 25.52, 28.6, 36.84, 43.04, 50.0, 55.48, 57.44, 57.44, 57.68, 57.56]
+14.416783809661865
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1345, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.307875871658325, 'TIME_S_1KI': 7.663848231716227, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 549.730076084137, 'W': 38.131256134653135}
+[24.12, 24.04, 24.4, 24.52, 24.56, 24.68, 24.68, 24.64, 24.56, 24.28, 24.12, 24.24, 24.0, 23.96, 23.96, 24.08, 24.08, 24.08, 24.08, 24.08]
+436.85999999999996
+21.842999999999996
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1345, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.307875871658325, 'TIME_S_1KI': 7.663848231716227, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 549.730076084137, 'W': 38.131256134653135, 'J_1KI': 408.7212461592096, 'W_1KI': 28.350376308292294, 'W_D': 16.28825613465314, 'J_D': 234.82426732969293, 'W_D_1KI': 12.110227609407538, 'J_D_1KI': 9.003886698444267}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_darcy003.json b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_darcy003.json
new file mode 100644
index 0000000..629b0ac
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_darcy003.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1641, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.180182933807373, "TIME_S_1KI": 6.203645907256169, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 572.6159508895875, "W": 39.25282662523282, "J_1KI": 348.94329731236286, "W_1KI": 23.920064975766497, "W_D": 17.70882662523282, "J_D": 258.33443012809755, "W_D_1KI": 10.791484841701902, "J_D_1KI": 6.576163827971908}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_darcy003.output b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_darcy003.output
new file mode 100644
index 0000000..ee1fc72
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_darcy003.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/darcy003.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 6.3963303565979}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.2730, 0.2238, 0.6515, ..., 0.6572, 0.5843, 0.9667])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 6.3963303565979 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1641 -m matrices/389000+_cols/darcy003.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.180182933807373}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.4101, 0.8547, 0.0587, ..., 0.2935, 0.9064, 0.8922])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.180182933807373 seconds
+
+[23.68, 23.72, 23.96, 23.96, 24.16, 24.28, 24.28, 24.52, 24.76, 24.52]
+[24.16, 23.84, 23.72, 26.96, 28.84, 34.04, 41.68, 44.88, 51.8, 55.52, 55.8, 56.2, 56.16, 56.16]
+14.587890863418579
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1641, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.180182933807373, 'TIME_S_1KI': 6.203645907256169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 572.6159508895875, 'W': 39.25282662523282}
+[23.68, 23.72, 23.96, 23.96, 24.16, 24.28, 24.28, 24.52, 24.76, 24.52, 24.08, 24.08, 24.12, 24.04, 23.6, 23.44, 23.28, 23.28, 23.4, 23.72]
+430.88
+21.544
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1641, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.180182933807373, 'TIME_S_1KI': 6.203645907256169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 572.6159508895875, 'W': 39.25282662523282, 'J_1KI': 348.94329731236286, 'W_1KI': 23.920064975766497, 'W_D': 17.70882662523282, 'J_D': 258.33443012809755, 'W_D_1KI': 10.791484841701902, 'J_D_1KI': 6.576163827971908}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_helm2d03.json
new file mode 100644
index 0000000..4535749
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_helm2d03.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1801, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.381673336029053, "TIME_S_1KI": 5.764393856762384, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 582.8333181858063, "W": 39.805332297033694, "J_1KI": 323.6165009360391, "W_1KI": 22.10179472350566, "W_D": 18.366332297033694, "J_D": 268.92151824545857, "W_D_1KI": 10.197852469202495, "J_D_1KI": 5.66232785630344}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_helm2d03.output
new file mode 100644
index 0000000..c022936
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_helm2d03.output
@@ -0,0 +1,74 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/helm2d03.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 5.829137325286865}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+                            2741928, 2741935]),
+       col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+                           392256]),
+       values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+                       3.5476]), size=(392257, 392257), nnz=2741935,
+       layout=torch.sparse_csr)
+tensor([0.8248, 0.9604, 0.9464, ..., 0.7437, 0.8759, 0.5369])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 5.829137325286865 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1801 -m matrices/389000+_cols/helm2d03.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.381673336029053}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+                            2741928, 2741935]),
+       col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+                           392256]),
+       values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+                       3.5476]), size=(392257, 392257), nnz=2741935,
+       layout=torch.sparse_csr)
+tensor([0.8790, 0.0885, 0.6163, ..., 0.1605, 0.4532, 0.8862])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 10.381673336029053 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+                            2741928, 2741935]),
+       col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+                           392256]),
+       values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+                       3.5476]), size=(392257, 392257), nnz=2741935,
+       layout=torch.sparse_csr)
+tensor([0.8790, 0.0885, 0.6163, ..., 0.1605, 0.4532, 0.8862])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 10.381673336029053 seconds
+
+[23.72, 23.72, 23.68, 23.68, 23.68, 23.6, 23.68, 23.64, 23.64, 23.72]
+[23.88, 24.0, 24.28, 26.28, 27.2, 34.12, 40.64, 47.44, 53.0, 57.04, 57.04, 56.96, 57.52, 57.24]
+14.642091512680054
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1801, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.381673336029053, 'TIME_S_1KI': 5.764393856762384, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.8333181858063, 'W': 39.805332297033694}
+[23.72, 23.72, 23.68, 23.68, 23.68, 23.6, 23.68, 23.64, 23.64, 23.72, 23.68, 24.04, 24.0, 24.12, 24.0, 23.92, 24.08, 24.04, 23.76, 23.88]
+428.78000000000003
+21.439
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1801, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.381673336029053, 'TIME_S_1KI': 5.764393856762384, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.8333181858063, 'W': 39.805332297033694, 'J_1KI': 323.6165009360391, 'W_1KI': 22.10179472350566, 'W_D': 18.366332297033694, 'J_D': 268.92151824545857, 'W_D_1KI': 10.197852469202495, 'J_D_1KI': 5.66232785630344}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_language.json b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_language.json
new file mode 100644
index 0000000..319d17a
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_language.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 16, "ITERATIONS": 2142, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.489102602005005, "TIME_S_1KI": 4.896873296921104, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 527.8806602096558, "W": 35.901030267154354, "J_1KI": 246.44288525194017, "W_1KI": 16.760518332004835, "W_D": 14.631030267154355, "J_D": 215.1313725399972, "W_D_1KI": 6.830546343209316, "J_D_1KI": 3.1888638390332944}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_language.output b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_language.output
new file mode 100644
index 0000000..e43ddab
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_language.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/language.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 4.9012720584869385}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+                            1216332, 1216334]),
+       col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+                           399129]),
+       values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+       size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.0783, 0.8612, 0.3161, ..., 0.8531, 0.6998, 0.6080])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 4.9012720584869385 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 2142 -m matrices/389000+_cols/language.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.489102602005005}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+                            1216332, 1216334]),
+       col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+                           399129]),
+       values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+       size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.5677, 0.4069, 0.3735, ..., 0.4488, 0.2885, 0.1400])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 10.489102602005005 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+                            1216332, 1216334]),
+       col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+                           399129]),
+       values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+       size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.5677, 0.4069, 0.3735, ..., 0.4488, 0.2885, 0.1400])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 10.489102602005005 seconds
+
+[23.72, 23.8, 23.88, 23.8, 23.8, 23.52, 23.48, 23.36, 23.36, 23.24]
+[23.28, 23.4, 23.56, 24.68, 26.6, 30.12, 36.56, 41.44, 45.96, 49.92, 50.84, 50.96, 50.76, 51.04]
+14.703774690628052
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 2142, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.489102602005005, 'TIME_S_1KI': 4.896873296921104, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 527.8806602096558, 'W': 35.901030267154354}
+[23.72, 23.8, 23.88, 23.8, 23.8, 23.52, 23.48, 23.36, 23.36, 23.24, 23.6, 23.64, 23.64, 23.68, 23.68, 23.68, 23.76, 23.64, 23.64, 23.52]
+425.4
+21.27
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 2142, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.489102602005005, 'TIME_S_1KI': 4.896873296921104, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 527.8806602096558, 'W': 35.901030267154354, 'J_1KI': 246.44288525194017, 'W_1KI': 16.760518332004835, 'W_D': 14.631030267154355, 'J_D': 215.1313725399972, 'W_D_1KI': 6.830546343209316, 'J_D_1KI': 3.1888638390332944}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_marine1.json b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_marine1.json
new file mode 100644
index 0000000..ca7dd60
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_marine1.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 12.712969779968262, "TIME_S_1KI": 12.712969779968262, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 642.9576612091064, "W": 41.13704135154824, "J_1KI": 642.9576612091064, "W_1KI": 41.13704135154824, "W_D": 19.36904135154824, "J_D": 302.73138558578484, "W_D_1KI": 19.36904135154824, "J_D_1KI": 19.36904135154824}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_marine1.output b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_marine1.output
new file mode 100644
index 0000000..30a4718
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_marine1.output
@@ -0,0 +1,51 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/marine1.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 12.712969779968262}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+                            6226531, 6226538]),
+       col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+                           400319]),
+       values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+                      -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+       size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.7918, 0.6380, 0.4821, ..., 0.8085, 0.1927, 0.4528])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 12.712969779968262 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+                            6226531, 6226538]),
+       col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+                           400319]),
+       values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+                      -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+       size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.7918, 0.6380, 0.4821, ..., 0.8085, 0.1927, 0.4528])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 12.712969779968262 seconds
+
+[24.16, 24.24, 24.24, 24.08, 24.2, 24.48, 24.48, 24.6, 24.28, 24.24]
+[23.96, 23.92, 24.76, 26.04, 30.04, 34.84, 41.8, 41.8, 48.8, 54.68, 57.92, 59.36, 59.6, 60.0, 59.68]
+15.629652500152588
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 12.712969779968262, 'TIME_S_1KI': 12.712969779968262, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 642.9576612091064, 'W': 41.13704135154824}
+[24.16, 24.24, 24.24, 24.08, 24.2, 24.48, 24.48, 24.6, 24.28, 24.24, 24.24, 24.16, 24.08, 24.12, 24.12, 24.0, 23.92, 24.0, 24.0, 24.08]
+435.36
+21.768
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 12.712969779968262, 'TIME_S_1KI': 12.712969779968262, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 642.9576612091064, 'W': 41.13704135154824, 'J_1KI': 642.9576612091064, 'W_1KI': 41.13704135154824, 'W_D': 19.36904135154824, 'J_D': 302.73138558578484, 'W_D_1KI': 19.36904135154824, 'J_D_1KI': 19.36904135154824}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_mario002.json b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_mario002.json
new file mode 100644
index 0000000..de53ae3
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_mario002.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1694, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.441875219345093, "TIME_S_1KI": 6.164034958291081, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 563.8270891189575, "W": 38.54107447042048, "J_1KI": 332.8377149462559, "W_1KI": 22.751519758217523, "W_D": 16.675074470420483, "J_D": 243.94386582851405, "W_D_1KI": 9.843609486670887, "J_D_1KI": 5.81086746556723}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_mario002.output b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_mario002.output
new file mode 100644
index 0000000..4db99ef
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_mario002.output
@@ -0,0 +1,93 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/mario002.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 7.140163421630859}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.1948, 0.1869, 0.5638, ..., 0.6155, 0.6170, 0.7726])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 7.140163421630859 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1470 -m matrices/389000+_cols/mario002.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 9.107451438903809}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.1872, 0.8693, 0.3135, ..., 0.4431, 0.3648, 0.2379])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 9.107451438903809 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1694 -m matrices/389000+_cols/mario002.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.441875219345093}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.6416, 0.1879, 0.5321, ..., 0.0693, 0.5314, 0.2281])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.441875219345093 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.6416, 0.1879, 0.5321, ..., 0.0693, 0.5314, 0.2281])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.441875219345093 seconds
+
+[24.04, 23.88, 24.2, 24.4, 24.52, 24.32, 24.64, 24.08, 24.08, 24.2]
+[24.04, 23.88, 24.16, 26.0, 26.68, 32.8, 39.08, 45.28, 50.52, 54.72, 54.84, 55.48, 55.4, 55.76]
+14.629251956939697
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1694, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.441875219345093, 'TIME_S_1KI': 6.164034958291081, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 563.8270891189575, 'W': 38.54107447042048}
+[24.04, 23.88, 24.2, 24.4, 24.52, 24.32, 24.64, 24.08, 24.08, 24.2, 24.0, 23.96, 24.08, 24.08, 24.44, 24.72, 24.6, 24.56, 24.48, 24.32]
+437.32
+21.866
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1694, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.441875219345093, 'TIME_S_1KI': 6.164034958291081, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 563.8270891189575, 'W': 38.54107447042048, 'J_1KI': 332.8377149462559, 'W_1KI': 22.751519758217523, 'W_D': 16.675074470420483, 'J_D': 243.94386582851405, 'W_D_1KI': 9.843609486670887, 'J_D_1KI': 5.81086746556723}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_test1.json b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_test1.json
new file mode 100644
index 0000000..45ba869
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_test1.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 33.385778188705444, "TIME_S_1KI": 33.385778188705444, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1758.8809212875371, "W": 47.990539203077404, "J_1KI": 1758.8809212875371, "W_1KI": 47.990539203077404, "W_D": 26.191539203077404, "J_D": 959.9350073668963, "W_D_1KI": 26.191539203077404, "J_D_1KI": 26.191539203077404}
diff --git a/pytorch/output_389000+_16core/altra_16_csr_10_10_10_test1.output b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_test1.output
new file mode 100644
index 0000000..fec9ae3
--- /dev/null
+++ b/pytorch/output_389000+_16core/altra_16_csr_10_10_10_test1.output
@@ -0,0 +1,51 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/test1.mtx -c 16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 33.385778188705444}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+                            12968191, 12968200]),
+       col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+                           392907]),
+       values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+                      0.0000e+00, 0.0000e+00, 2.1156e-17]),
+       size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.3716, 0.7020, 0.0579, ..., 0.5562, 0.4218, 0.2724])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 33.385778188705444 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+                            12968191, 12968200]),
+       col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+                           392907]),
+       values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+                      0.0000e+00, 0.0000e+00, 2.1156e-17]),
+       size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.3716, 0.7020, 0.0579, ..., 0.5562, 0.4218, 0.2724])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 33.385778188705444 seconds
+
+[24.12, 24.32, 24.32, 24.28, 24.08, 24.2, 24.0, 24.0, 24.08, 24.08]
+[24.24, 24.16, 24.36, 28.92, 31.16, 35.28, 37.04, 40.44, 46.16, 49.16, 54.2, 56.72, 56.48, 56.72, 56.72, 56.48, 57.24, 56.52, 56.44, 56.56, 56.52, 57.2, 56.84, 56.8, 56.92, 56.92, 58.08, 57.8, 57.88, 57.44, 57.4, 56.88, 56.44, 57.16, 57.12]
+36.65057635307312
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 33.385778188705444, 'TIME_S_1KI': 33.385778188705444, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1758.8809212875371, 'W': 47.990539203077404}
+[24.12, 24.32, 24.32, 24.28, 24.08, 24.2, 24.0, 24.0, 24.08, 24.08, 24.24, 24.24, 24.24, 24.28, 24.48, 24.36, 24.4, 24.16, 24.16, 24.32]
+435.98
+21.799
+{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 33.385778188705444, 'TIME_S_1KI': 33.385778188705444, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1758.8809212875371, 'W': 47.990539203077404, 'J_1KI': 1758.8809212875371, 'W_1KI': 47.990539203077404, 'W_D': 26.191539203077404, 'J_D': 959.9350073668963, 'W_D_1KI': 26.191539203077404, 'J_D_1KI': 26.191539203077404}
diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_amazon0312.json
new file mode 100644
index 0000000..6a184df
--- /dev/null
+++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_amazon0312.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 20402, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.640971422195435, "TIME_S_1KI": 0.5215651123515065, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1986.5667917442322, "W": 146.96, "J_1KI": 97.3711788914926, "W_1KI": 7.203215371042055, "W_D": 111.28150000000001, "J_D": 1504.2741728054286, "W_D_1KI": 5.454440741103813, "J_D_1KI": 0.2673483355114113}
diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_amazon0312.output
new file mode 100644
index 0000000..1947bf8
--- /dev/null
+++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_amazon0312.output
@@ -0,0 +1,93 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 0.5630712509155273}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+                            3200438, 3200440]),
+       col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+                           400707]),
+       values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+       nnz=3200440, layout=torch.sparse_csr)
+tensor([0.4842, 0.5105, 0.4860, ..., 0.7675, 0.4934, 0.1706])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 0.5630712509155273 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '18647', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 9.59645700454712}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+                            3200438, 3200440]),
+       col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+                           400707]),
+       values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+       nnz=3200440, layout=torch.sparse_csr)
+tensor([0.6749, 0.4854, 0.2428, ..., 0.8655, 0.6324, 0.8376])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 9.59645700454712 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '20402', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.640971422195435}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+                            3200438, 3200440]),
+       col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+                           400707]),
+       values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+       nnz=3200440, layout=torch.sparse_csr)
+tensor([0.0724, 0.4329, 0.4595, ..., 0.8349, 0.8167, 0.6766])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 10.640971422195435 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+                            3200438, 3200440]),
+       col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+                           400707]),
+       values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+       nnz=3200440, layout=torch.sparse_csr)
+tensor([0.0724, 0.4329, 0.4595, ..., 0.8349, 0.8167, 0.6766])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 10.640971422195435 seconds
+
+[41.07, 39.13, 39.76, 39.13, 39.16, 39.59, 39.67, 39.13, 39.06, 38.95]
+[146.96]
+13.517738103866577
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 20402, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.640971422195435, 'TIME_S_1KI': 0.5215651123515065, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1986.5667917442322, 'W': 146.96}
+[41.07, 39.13, 39.76, 39.13, 39.16, 39.59, 39.67, 39.13, 39.06, 38.95, 39.85, 39.43, 39.2, 39.39, 39.57, 39.13, 39.11, 38.95, 44.59, 39.27]
+713.5699999999999
+35.6785
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 20402, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.640971422195435, 'TIME_S_1KI': 0.5215651123515065, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1986.5667917442322, 'W': 146.96, 'J_1KI': 97.3711788914926, 'W_1KI': 7.203215371042055, 'W_D': 111.28150000000001, 'J_D': 1504.2741728054286, 'W_D_1KI': 5.454440741103813, 'J_D_1KI': 0.2673483355114113}
diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_darcy003.json b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_darcy003.json
new file mode 100644
index 0000000..d5b5a92
--- /dev/null
+++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_darcy003.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 25477, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.081330299377441, "TIME_S_1KI": 0.39570319501422624, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1725.3062429666518, "W": 138.54, "J_1KI": 67.72014927058333, "W_1KI": 5.4378459002237305, "W_D": 102.46124999999999, "J_D": 1275.9999587640166, "W_D_1KI": 4.021715665109706, "J_D_1KI": 0.1578567203795465}
diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_darcy003.output b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_darcy003.output
new file mode 100644
index 0000000..66066f0
--- /dev/null
+++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_darcy003.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 0.41213083267211914}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.5658, 0.2599, 0.8647, ..., 0.0110, 0.8951, 0.0945])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 0.41213083267211914 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '25477', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.081330299377441}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.2219, 0.1747, 0.4109, ..., 0.8392, 0.1445, 0.6192])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.081330299377441 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+                            2101239, 2101242]),
+       col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+                           234127]),
+       values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+       size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.2219, 0.1747, 0.4109, ..., 0.8392, 0.1445, 0.6192])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.081330299377441 seconds
+
+[40.52, 39.06, 39.46, 40.78, 43.93, 39.17, 39.42, 39.04, 39.35, 39.02]
+[138.54]
+12.453488111495972
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 25477, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.081330299377441, 'TIME_S_1KI': 0.39570319501422624, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1725.3062429666518, 'W': 138.54}
+[40.52, 39.06, 39.46, 40.78, 43.93, 39.17, 39.42, 39.04, 39.35, 39.02, 39.79, 46.26, 39.61, 38.94, 39.43, 40.38, 39.06, 38.95, 39.02, 40.1]
+721.575
+36.07875
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 25477, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.081330299377441, 'TIME_S_1KI': 0.39570319501422624, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1725.3062429666518, 'W': 138.54, 'J_1KI': 67.72014927058333, 'W_1KI': 5.4378459002237305, 'W_D': 102.46124999999999, 'J_D': 1275.9999587640166, 'W_D_1KI': 4.021715665109706, 'J_D_1KI': 0.1578567203795465}
diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_helm2d03.json
new file mode 100644
index 0000000..0316f37
--- /dev/null
+++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_helm2d03.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 30984, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.317180871963501, "TIME_S_1KI": 0.33298414897894074, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1916.101525504589, "W": 149.37, "J_1KI": 61.84164489751449, "W_1KI": 4.820875290472502, "W_D": 113.45200000000001, "J_D": 1455.3494695825577, "W_D_1KI": 3.661631809966435, "J_D_1KI": 0.11817815033457382}
diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_helm2d03.output
new file mode 100644
index 0000000..257335e
--- /dev/null
+++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_helm2d03.output
@@ -0,0 +1,97 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 0.39348506927490234}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+                            2741928, 2741935]),
+       col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+                           392256]),
+       values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+                       3.5476]), size=(392257, 392257), nnz=2741935,
+       layout=torch.sparse_csr)
+tensor([0.1720, 0.8662, 0.8556, ..., 0.0402, 0.8663, 0.3929])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 0.39348506927490234 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '26684', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 9.042525291442871}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+                            2741928, 2741935]),
+       col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+                           392256]),
+       values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+                       3.5476]), size=(392257, 392257), nnz=2741935,
+       layout=torch.sparse_csr)
+tensor([0.7375, 0.5933, 0.9050, ..., 0.8578, 0.6740, 0.2052])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 9.042525291442871 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '30984', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '16']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.317180871963501}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.0633, 0.8834, 0.2857, ..., 0.1984, 0.6858, 0.2922]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.317180871963501 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.0633, 0.8834, 0.2857, ..., 0.1984, 0.6858, 0.2922]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.317180871963501 seconds + +[40.3, 39.14, 39.07, 38.97, 39.41, 40.63, 44.46, 38.96, 39.02, 39.22] +[149.37] +12.827887296676636 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 30984, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.317180871963501, 'TIME_S_1KI': 0.33298414897894074, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1916.101525504589, 'W': 149.37} +[40.3, 39.14, 39.07, 38.97, 39.41, 40.63, 44.46, 38.96, 39.02, 39.22, 40.56, 39.23, 39.36, 44.65, 39.73, 38.95, 39.17, 38.94, 39.11, 39.04] +718.3599999999999 +35.91799999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 30984, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.317180871963501, 'TIME_S_1KI': 0.33298414897894074, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1916.101525504589, 'W': 149.37, 'J_1KI': 61.84164489751449, 'W_1KI': 4.820875290472502, 'W_D': 113.45200000000001, 'J_D': 1455.3494695825577, 'W_D_1KI': 3.661631809966435, 'J_D_1KI': 0.11817815033457382} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_language.json b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_language.json new file mode 100644 index 0000000..26cda3b --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_language.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 30366, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.31269907951355, "TIME_S_1KI": 0.33961335307625473, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1775.0306821584702, "W": 137.59, "J_1KI": 58.454543968862225, "W_1KI": 4.531054468813805, "W_D": 102.3395, "J_D": 1320.2685696399212, "W_D_1KI": 3.370200223934664, "J_D_1KI": 0.11098597852646591} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_language.output b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_language.output new file mode 100644 index 0000000..a5b64fc --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_language.output @@ -0,0 +1,93 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/language.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 0.3785414695739746} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.2846, 0.0684, 0.3415, ..., 0.3157, 0.0663, 0.9624]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 0.3785414695739746 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '27738', '-m', 'matrices/389000+_cols/language.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 9.591154098510742} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.1194, 0.5235, 0.5697, ..., 0.7322, 0.5132, 0.4627]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 9.591154098510742 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '30366', '-m', 'matrices/389000+_cols/language.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.31269907951355} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.4535, 0.4285, 0.5680, ..., 0.4151, 0.8586, 0.5793]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.31269907951355 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.4535, 0.4285, 0.5680, ..., 0.4151, 0.8586, 0.5793]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.31269907951355 seconds + +[40.67, 39.06, 39.61, 38.65, 39.12, 40.0, 38.74, 39.55, 38.64, 38.57] +[137.59] +12.900869846343994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 30366, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.31269907951355, 'TIME_S_1KI': 0.33961335307625473, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1775.0306821584702, 'W': 137.59} +[40.67, 39.06, 39.61, 38.65, 39.12, 40.0, 38.74, 39.55, 38.64, 38.57, 39.44, 38.8, 39.85, 39.35, 38.9, 38.8, 38.74, 39.19, 39.29, 38.76] +705.01 +35.2505 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 30366, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.31269907951355, 'TIME_S_1KI': 0.33961335307625473, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1775.0306821584702, 'W': 137.59, 'J_1KI': 58.454543968862225, 'W_1KI': 4.531054468813805, 'W_D': 102.3395, 'J_D': 1320.2685696399212, 'W_D_1KI': 3.370200223934664, 'J_D_1KI': 0.11098597852646591} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_marine1.json b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_marine1.json new file mode 100644 index 0000000..c6e3e05 --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_marine1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 19495, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.71416425704956, "TIME_S_1KI": 0.5495852401666869, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2110.003728497028, "W": 158.07, "J_1KI": 108.23307147971418, "W_1KI": 8.108232880225698, "W_D": 122.3625, "J_D": 1633.3607340306044, "W_D_1KI": 6.276609387022313, "J_D_1KI": 0.32195995829814383} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_marine1.output b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_marine1.output new file mode 100644 index 0000000..d7a731b --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_marine1.output @@ -0,0 +1,97 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 
0.6046795845031738} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.7423, 0.4233, 0.1707, ..., 0.4030, 0.8937, 0.1151]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 0.6046795845031738 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '17364', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 9.352032661437988} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.3036, 0.7996, 0.4739, ..., 0.0238, 0.6033, 0.9918]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 9.352032661437988 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '19495', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.71416425704956} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.1117, 0.6424, 0.8924, ..., 0.5333, 0.0312, 0.4242]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.71416425704956 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.1117, 0.6424, 0.8924, ..., 0.5333, 0.0312, 0.4242]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.71416425704956 seconds + +[41.06, 39.65, 39.88, 39.81, 39.59, 39.74, 39.82, 39.26, 40.0, 39.37] +[158.07] +13.34854006767273 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19495, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.71416425704956, 'TIME_S_1KI': 0.5495852401666869, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2110.003728497028, 'W': 158.07} +[41.06, 39.65, 39.88, 39.81, 39.59, 39.74, 39.82, 39.26, 40.0, 39.37, 41.26, 39.31, 39.83, 39.61, 39.66, 39.59, 39.58, 39.11, 39.31, 39.11] +714.15 +35.707499999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19495, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.71416425704956, 'TIME_S_1KI': 0.5495852401666869, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2110.003728497028, 'W': 158.07, 'J_1KI': 108.23307147971418, 'W_1KI': 8.108232880225698, 'W_D': 122.3625, 'J_D': 1633.3607340306044, 'W_D_1KI': 6.276609387022313, 'J_D_1KI': 0.32195995829814383} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_mario002.json b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_mario002.json new file mode 100644 index 0000000..aeda75b --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_mario002.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 23986, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.204793214797974, "TIME_S_1KI": 
0.4254478952221285, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1608.246218369007, "W": 138.89, "J_1KI": 67.04937123192725, "W_1KI": 5.79046110230968, "W_D": 103.45274999999998, "J_D": 1197.9083732981082, "W_D_1KI": 4.313047194196614, "J_D_1KI": 0.1798151919534985} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_mario002.output b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_mario002.output new file mode 100644 index 0000000..6816aec --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_mario002.output @@ -0,0 +1,71 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 0.43773889541625977} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.9846, 0.2787, 0.2893, ..., 0.3452, 0.1271, 0.5089]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 0.43773889541625977 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '23986', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.204793214797974} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.2099, 0.5726, 0.9552, ..., 0.7541, 0.8652, 0.1203]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.204793214797974 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.2099, 0.5726, 0.9552, ..., 0.7541, 0.8652, 0.1203]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.204793214797974 seconds + +[40.06, 38.94, 39.06, 39.11, 39.73, 38.92, 39.35, 38.88, 39.43, 39.21] +[138.89] +11.579280138015747 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 23986, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.204793214797974, 'TIME_S_1KI': 0.4254478952221285, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1608.246218369007, 'W': 138.89} +[40.06, 38.94, 39.06, 39.11, 39.73, 38.92, 39.35, 38.88, 39.43, 39.21, 41.54, 39.4, 39.48, 39.09, 39.42, 38.94, 39.43, 39.44, 39.0, 41.44] +708.7450000000001 +35.437250000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 23986, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.204793214797974, 'TIME_S_1KI': 0.4254478952221285, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1608.246218369007, 'W': 138.89, 'J_1KI': 67.04937123192725, 'W_1KI': 5.79046110230968, 'W_D': 103.45274999999998, 'J_D': 1197.9083732981082, 'W_D_1KI': 4.313047194196614, 'J_D_1KI': 0.1798151919534985} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_test1.json b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_test1.json new file mode 100644 index 0000000..ce7af8e --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_test1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2652, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 10.818459033966064, "TIME_S_1KI": 4.079358610092784, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1727.0440453481674, "W": 123.26, "J_1KI": 651.2232448522501, "W_1KI": 46.47812971342383, "W_D": 87.892, "J_D": 1231.4891711320877, "W_D_1KI": 33.14177978883861, "J_D_1KI": 12.49690037286524} diff --git a/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_test1.output b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_test1.output new file mode 100644 index 0000000..4cb795c --- /dev/null +++ b/pytorch/output_389000+_16core/epyc_7313p_16_csr_10_10_10_test1.output @@ -0,0 +1,74 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/test1.mtx', '-c', '16'] +{"MATRIX_TYPE": 
"SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 3.9585680961608887} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.7261, 0.8238, 0.5826, ..., 0.6988, 0.4899, 0.5621]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 3.9585680961608887 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '2652', '-m', 'matrices/389000+_cols/test1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 10.818459033966064} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.8039, 0.1348, 0.9933, ..., 0.2390, 0.5536, 0.8375]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 10.818459033966064 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.8039, 0.1348, 0.9933, ..., 0.2390, 0.5536, 0.8375]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 10.818459033966064 seconds + +[40.14, 39.06, 39.78, 39.45, 39.62, 39.78, 39.17, 39.08, 39.09, 38.93] +[123.26] +14.011390924453735 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2652, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 10.818459033966064, 'TIME_S_1KI': 4.079358610092784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1727.0440453481674, 'W': 123.26} +[40.14, 39.06, 39.78, 39.45, 39.62, 39.78, 39.17, 39.08, 39.09, 38.93, 39.84, 39.42, 39.65, 39.42, 38.97, 39.03, 39.01, 38.92, 39.07, 38.77] +707.36 +35.368 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2652, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 10.818459033966064, 'TIME_S_1KI': 4.079358610092784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1727.0440453481674, 'W': 123.26, 'J_1KI': 651.2232448522501, 'W_1KI': 46.47812971342383, 'W_D': 87.892, 'J_D': 1231.4891711320877, 'W_D_1KI': 33.14177978883861, 'J_D_1KI': 12.49690037286524} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_amazon0312.json new file mode 100644 index 0000000..a5b23aa --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_amazon0312.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8118, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.907745361328125, "TIME_S_1KI": 1.3436493423661153, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1301.0154501628876, "W": 88.97, "J_1KI": 160.2630512642138, "W_1KI": 10.959595959595958, "W_D": 72.9665, "J_D": 1066.9949853243827, "W_D_1KI": 8.988236018723823, "J_D_1KI": 1.1071983270169774} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_amazon0312.output new file mode 100644 index 0000000..cece27c --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_amazon0312.output @@ -0,0 +1,71 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, 
"MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 1.293351411819458} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.2979, 0.5597, 0.8769, ..., 0.0942, 0.8424, 0.4292]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 1.293351411819458 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '8118', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.907745361328125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.6448, 0.4532, 0.0841, ..., 0.5791, 0.6160, 0.9399]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 10.907745361328125 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.6448, 0.4532, 0.0841, ..., 0.5791, 0.6160, 0.9399]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 10.907745361328125 seconds + +[18.52, 17.54, 17.79, 17.99, 17.73, 17.74, 17.74, 17.7, 17.79, 17.98] +[88.97] +14.623080253601074 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8118, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.907745361328125, 'TIME_S_1KI': 1.3436493423661153, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.0154501628876, 'W': 88.97} +[18.52, 17.54, 17.79, 17.99, 17.73, 17.74, 17.74, 17.7, 17.79, 17.98, 18.52, 17.6, 17.66, 17.43, 18.0, 17.58, 17.75, 17.82, 17.89, 17.62] +320.06999999999994 +16.003499999999995 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8118, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.907745361328125, 'TIME_S_1KI': 1.3436493423661153, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.0154501628876, 'W': 88.97, 'J_1KI': 160.2630512642138, 'W_1KI': 10.959595959595958, 'W_D': 72.9665, 'J_D': 1066.9949853243827, 'W_D_1KI': 8.988236018723823, 'J_D_1KI': 1.1071983270169774} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_darcy003.json b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_darcy003.json new file mode 100644 index 0000000..a5c7de7 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_darcy003.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 13615, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.780885696411133, "TIME_S_1KI": 0.7918388319068037, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1298.2887223243713, "W": 89.8, "J_1KI": 95.35723263491526, "W_1KI": 6.595666544252662, "W_D": 73.5375, "J_D": 1063.172682827711, "W_D_1KI": 5.401211898641204, "J_D_1KI": 0.3967103855043117} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_darcy003.output b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_darcy003.output new file mode 100644 index 0000000..7488c34 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_darcy003.output @@ -0,0 +1,71 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 
2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 0.7711856365203857} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.3467, 0.9628, 0.5083, ..., 0.1832, 0.8742, 0.8835]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 0.7711856365203857 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '13615', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.780885696411133} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0947, 0.1039, 0.1767, ..., 0.1078, 0.6970, 0.7249]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.780885696411133 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0947, 0.1039, 0.1767, ..., 0.1078, 0.6970, 0.7249]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.780885696411133 seconds + +[19.22, 17.93, 18.06, 17.51, 17.76, 17.39, 17.9, 17.91, 17.46, 17.78] +[89.8] +14.457558155059814 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13615, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.780885696411133, 'TIME_S_1KI': 0.7918388319068037, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1298.2887223243713, 'W': 89.8} +[19.22, 17.93, 18.06, 17.51, 17.76, 17.39, 17.9, 17.91, 17.46, 17.78, 18.08, 17.65, 17.61, 17.86, 17.77, 17.72, 21.98, 18.52, 17.82, 17.72] +325.25 +16.2625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13615, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.780885696411133, 'TIME_S_1KI': 0.7918388319068037, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1298.2887223243713, 'W': 89.8, 'J_1KI': 95.35723263491526, 'W_1KI': 6.595666544252662, 'W_D': 73.5375, 'J_D': 1063.172682827711, 'W_D_1KI': 5.401211898641204, 'J_D_1KI': 0.3967103855043117} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_helm2d03.json new file mode 100644 index 0000000..6b89e52 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_helm2d03.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 12165, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.51817512512207, "TIME_S_1KI": 0.8646259864465327, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1289.3463970065116, "W": 89.63, "J_1KI": 105.98819539716494, "W_1KI": 7.3678586107685975, "W_D": 73.37299999999999, "J_D": 1055.4860335552692, "W_D_1KI": 6.031483764899301, "J_D_1KI": 0.49580631030820393} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_helm2d03.output new file mode 100644 index 0000000..9f3e3b4 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_helm2d03.output @@ -0,0 +1,74 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 
2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 0.8631081581115723} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.5077, 0.7328, 0.3933, ..., 0.4074, 0.1030, 0.0500]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 0.8631081581115723 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '12165', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.51817512512207} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.9100, 0.1506, 0.3829, ..., 0.6719, 0.7400, 0.8631]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.51817512512207 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.9100, 0.1506, 0.3829, ..., 0.6719, 0.7400, 0.8631]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.51817512512207 seconds + +[18.23, 17.69, 18.03, 22.07, 17.84, 17.63, 17.63, 17.56, 17.59, 17.97] +[89.63] +14.385210275650024 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 12165, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.51817512512207, 'TIME_S_1KI': 0.8646259864465327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.3463970065116, 'W': 89.63} +[18.23, 17.69, 18.03, 22.07, 17.84, 17.63, 17.63, 17.56, 17.59, 17.97, 18.65, 17.64, 17.75, 18.23, 17.75, 18.05, 17.76, 17.53, 18.1, 17.73] +325.14 +16.256999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 12165, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.51817512512207, 'TIME_S_1KI': 0.8646259864465327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.3463970065116, 'W': 89.63, 'J_1KI': 105.98819539716494, 'W_1KI': 7.3678586107685975, 'W_D': 73.37299999999999, 'J_D': 1055.4860335552692, 'W_D_1KI': 6.031483764899301, 'J_D_1KI': 0.49580631030820393} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_language.json b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_language.json new file mode 100644 index 0000000..3630ff9 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_language.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 13328, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.746992826461792, "TIME_S_1KI": 0.8063470007849484, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1285.765242910385, "W": 89.74, "J_1KI": 96.47098161092326, "W_1KI": 6.733193277310924, "W_D": 73.49574999999999, "J_D": 1053.0229646939038, "W_D_1KI": 5.51438700480192, "J_D_1KI": 0.4137445231694118} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_language.output b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_language.output new file mode 100644 index 0000000..437027f --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_language.output @@ -0,0 +1,71 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/language.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 
399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 0.7877652645111084} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.0377, 0.4846, 0.1087, ..., 0.8181, 0.0416, 0.1571]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 0.7877652645111084 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '13328', '-m', 'matrices/389000+_cols/language.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.746992826461792} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.5687, 0.0271, 0.0300, ..., 0.3524, 0.7739, 0.4785]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.746992826461792 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.5687, 0.0271, 0.0300, ..., 0.3524, 0.7739, 0.4785]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.746992826461792 seconds + +[18.42, 17.72, 17.91, 18.41, 17.89, 17.65, 19.38, 18.4, 18.2, 17.87] +[89.74] +14.327671527862549 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13328, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.746992826461792, 'TIME_S_1KI': 0.8063470007849484, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1285.765242910385, 'W': 89.74} +[18.42, 17.72, 17.91, 18.41, 17.89, 17.65, 19.38, 18.4, 18.2, 17.87, 19.34, 17.9, 18.16, 17.6, 17.86, 17.8, 17.9, 17.65, 17.71, 17.86] +324.885 +16.24425 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13328, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.746992826461792, 'TIME_S_1KI': 0.8063470007849484, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1285.765242910385, 'W': 89.74, 'J_1KI': 96.47098161092326, 'W_1KI': 6.733193277310924, 'W_D': 73.49574999999999, 'J_D': 1053.0229646939038, 'W_D_1KI': 5.51438700480192, 'J_D_1KI': 0.4137445231694118} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_marine1.json b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_marine1.json new file mode 100644 index 0000000..64c9afa --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_marine1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5897, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.55986738204956, "TIME_S_1KI": 1.790718565719783, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1299.1635441350936, "W": 89.05999999999999, "J_1KI": 220.30923251400603, "W_1KI": 15.102594539596403, "W_D": 72.89299999999999, "J_D": 1063.327287476301, "W_D_1KI": 12.361031032728503, "J_D_1KI": 2.096155847503562} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_marine1.output b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_marine1.output new file mode 100644 index 0000000..17a33fe --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_marine1.output @@ -0,0 +1,74 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, 
"MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 1.7802655696868896} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.5518, 0.1807, 0.4021, ..., 0.3397, 0.2107, 0.4589]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 1.7802655696868896 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '5897', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.55986738204956} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.5366, 0.8162, 0.5634, ..., 0.9410, 0.0469, 0.5938]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.55986738204956 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.5366, 0.8162, 0.5634, ..., 0.9410, 0.0469, 0.5938]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.55986738204956 seconds + +[18.26, 17.65, 17.89, 17.69, 17.78, 17.53, 17.59, 17.98, 17.75, 17.71] +[89.06] +14.587508916854858 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5897, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.55986738204956, 'TIME_S_1KI': 1.790718565719783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1299.1635441350936, 'W': 89.05999999999999} +[18.26, 17.65, 17.89, 17.69, 17.78, 17.53, 17.59, 17.98, 17.75, 17.71, 18.09, 17.83, 17.84, 17.57, 17.77, 17.94, 21.22, 17.67, 17.65, 17.92] +323.34000000000003 +16.167 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5897, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.55986738204956, 'TIME_S_1KI': 1.790718565719783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1299.1635441350936, 'W': 89.05999999999999, 'J_1KI': 220.30923251400603, 'W_1KI': 15.102594539596403, 'W_D': 72.89299999999999, 'J_D': 1063.327287476301, 'W_D_1KI': 12.361031032728503, 'J_D_1KI': 2.096155847503562} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_mario002.json b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_mario002.json new file mode 100644 index 0000000..ebecca4 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_mario002.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 13697, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.777354717254639, "TIME_S_1KI": 0.7868405283824662, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1316.45538356781, "W": 89.82, "J_1KI": 96.11268040941886, "W_1KI": 6.557640359202745, "W_D": 73.69574999999999, "J_D": 1080.128777928829, "W_D_1KI": 5.3804300211725185, "J_D_1KI": 0.3928181369038854} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_mario002.output b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_mario002.output new file mode 100644 index 0000000..4381db9 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_mario002.output @@ -0,0 +1,71 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": 
[389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 0.7665784358978271} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.5208, 0.6283, 0.9927, ..., 0.7747, 0.7207, 0.3302]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 0.7665784358978271 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '13697', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.777354717254639} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.9293, 0.9172, 0.4372, ..., 0.0528, 0.4444, 0.3291]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.777354717254639 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.9293, 0.9172, 0.4372, ..., 0.0528, 0.4444, 0.3291]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.777354717254639 seconds + +[18.67, 17.76, 17.77, 17.98, 17.83, 17.63, 17.99, 17.72, 18.01, 17.56] +[89.82] +14.656595230102539 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13697, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.777354717254639, 'TIME_S_1KI': 0.7868405283824662, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1316.45538356781, 'W': 89.82} +[18.67, 17.76, 17.77, 17.98, 17.83, 17.63, 17.99, 17.72, 18.01, 17.56, 18.11, 17.79, 18.09, 17.72, 17.66, 18.49, 17.86, 18.34, 17.71, 17.93] +322.485 +16.12425 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13697, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.777354717254639, 'TIME_S_1KI': 0.7868405283824662, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1316.45538356781, 'W': 89.82, 'J_1KI': 96.11268040941886, 'W_1KI': 6.557640359202745, 'W_D': 73.69574999999999, 'J_D': 1080.128777928829, 'W_D_1KI': 5.3804300211725185, 'J_D_1KI': 0.3928181369038854} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_test1.json b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_test1.json new file mode 100644 index 0000000..480eeda --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_test1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1887, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 10.573626518249512, "TIME_S_1KI": 5.603405680047436, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1443.276729001999, "W": 85.01, "J_1KI": 764.8525325924743, "W_1KI": 45.050344462109166, "W_D": 68.79925, "J_D": 1168.055011149168, "W_D_1KI": 36.459591944886064, "J_D_1KI": 19.321458370368873} diff --git a/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_test1.output b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_test1.output new file mode 100644 index 0000000..9043b10 --- /dev/null +++ b/pytorch/output_389000+_16core/xeon_4216_16_csr_10_10_10_test1.output @@ -0,0 +1,74 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/test1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 
8.400361127706946e-05, "TIME_S": 5.56341028213501} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.2549, 0.6086, 0.7138, ..., 0.3139, 0.6424, 0.7605]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 5.56341028213501 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1887', '-m', 'matrices/389000+_cols/test1.mtx', '-c', '16'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 10.573626518249512} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.3061, 0.1365, 0.6683, ..., 0.9071, 0.4159, 0.8227]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 10.573626518249512 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.3061, 0.1365, 0.6683, ..., 0.9071, 0.4159, 0.8227]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 10.573626518249512 seconds + +[18.08, 17.49, 17.66, 17.63, 17.95, 17.53, 17.51, 17.68, 17.93, 17.61] +[85.01] +16.977728843688965 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1887, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 10.573626518249512, 'TIME_S_1KI': 5.603405680047436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1443.276729001999, 'W': 85.01} +[18.08, 17.49, 17.66, 17.63, 17.95, 17.53, 17.51, 17.68, 17.93, 17.61, 18.14, 17.76, 17.8, 22.06, 18.45, 17.61, 17.74, 18.01, 17.71, 17.56] +324.21500000000003 +16.21075 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1887, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 10.573626518249512, 'TIME_S_1KI': 5.603405680047436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1443.276729001999, 'W': 85.01, 'J_1KI': 764.8525325924743, 'W_1KI': 45.050344462109166, 'W_D': 68.79925, 'J_D': 1168.055011149168, 'W_D_1KI': 36.459591944886064, 'J_D_1KI': 19.321458370368873} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_amazon0312.json new file mode 100644 index 0000000..f65875c --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_amazon0312.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 88.16403460502625, "TIME_S_1KI": 88.16403460502625, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2373.6426183891294, "W": 25.696559171496023, "J_1KI": 2373.6426183891294, "W_1KI": 25.696559171496023, "W_D": 4.897559171496024, "J_D": 452.3973461956977, "W_D_1KI": 4.897559171496024, "J_D_1KI": 4.897559171496024} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_amazon0312.output new file mode 100644 index 0000000..59f928c --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_amazon0312.output @@ -0,0 +1,49 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/amazon0312.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 
3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 88.16403460502625} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.0536, 0.3063, 0.9163, ..., 0.7704, 0.9774, 0.8233]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 88.16403460502625 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.0536, 0.3063, 0.9163, ..., 0.7704, 0.9774, 0.8233]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 88.16403460502625 seconds + +[22.96, 23.04, 23.08, 23.12, 23.16, 23.24, 23.24, 22.92, 22.8, 22.84] +[22.88, 23.08, 23.44, 27.52, 29.2, 30.24, 30.68, 28.56, 27.76, 26.76, 26.68, 26.84, 26.68, 26.6, 26.56, 26.56, 26.52, 26.72, 26.88, 26.8, 26.92, 26.88, 26.8, 27.0, 27.0, 26.96, 27.08, 26.92, 27.04, 27.08, 27.0, 26.88, 26.72, 26.84, 26.96, 27.04, 27.16, 27.16, 27.28, 27.44, 27.32, 27.44, 27.6, 27.56, 27.44, 27.44, 27.36, 27.72, 27.84, 28.0, 27.96, 27.92, 27.6, 27.56, 27.68, 27.4, 27.4, 27.12, 27.12, 27.12, 27.28, 27.32, 27.36, 27.24, 27.2, 27.16, 27.24, 27.2, 27.24, 27.32, 27.24, 27.28, 27.2, 26.92, 26.92, 27.2, 27.24, 27.32, 27.24, 27.24, 26.96, 26.96, 27.04, 26.84, 26.88, 26.96, 27.4, 27.48] +92.37200212478638 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 88.16403460502625, 'TIME_S_1KI': 88.16403460502625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2373.6426183891294, 'W': 25.696559171496023} +[22.96, 23.04, 23.08, 23.12, 23.16, 23.24, 23.24, 22.92, 22.8, 22.84, 23.16, 22.88, 22.76, 23.04, 23.12, 23.44, 23.56, 23.36, 23.24, 23.0] +415.98 +20.799 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 
1.9930237750099465e-05, 'TIME_S': 88.16403460502625, 'TIME_S_1KI': 88.16403460502625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2373.6426183891294, 'W': 25.696559171496023, 'J_1KI': 2373.6426183891294, 'W_1KI': 25.696559171496023, 'W_D': 4.897559171496024, 'J_D': 452.3973461956977, 'W_D_1KI': 4.897559171496024, 'J_D_1KI': 4.897559171496024} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_darcy003.json b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_darcy003.json new file mode 100644 index 0000000..e2b106a --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_darcy003.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 53.30746507644653, "TIME_S_1KI": 53.30746507644653, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1536.7467108154294, "W": 25.681407272182962, "J_1KI": 1536.7467108154294, "W_1KI": 25.681407272182962, "W_D": 4.658407272182959, "J_D": 278.7538851470942, "W_D_1KI": 4.658407272182959, "J_D_1KI": 4.658407272182959} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_darcy003.output b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_darcy003.output new file mode 100644 index 0000000..5b58f1e --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_darcy003.output @@ -0,0 +1,49 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/darcy003.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 53.30746507644653} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.6125, 0.5278, 0.3887, ..., 0.3141, 0.8902, 0.1690]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 53.30746507644653 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.6125, 0.5278, 0.3887, ..., 0.3141, 0.8902, 0.1690]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 53.30746507644653 seconds + +[23.16, 23.4, 23.4, 23.32, 23.48, 23.4, 23.36, 23.56, 23.24, 23.28] +[23.32, 22.84, 26.36, 27.44, 28.48, 28.48, 29.44, 30.12, 27.2, 26.6, 26.92, 27.16, 27.24, 27.2, 27.28, 27.12, 27.12, 26.96, 27.08, 27.04, 26.48, 26.72, 26.84, 26.76, 26.96, 27.12, 26.68, 26.68, 26.84, 26.64, 26.8, 26.96, 26.72, 26.6, 26.64, 26.44, 26.36, 26.68, 26.8, 26.84, 27.16, 27.28, 27.36, 27.0, 27.0, 27.12, 27.04, 26.96, 26.88, 26.88, 26.88, 26.64, 27.32, 27.88, 28.04, 28.0, 27.64] +59.83888244628906 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 53.30746507644653, 'TIME_S_1KI': 53.30746507644653, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.7467108154294, 'W': 25.681407272182962} +[23.16, 23.4, 23.4, 23.32, 23.48, 23.4, 23.36, 23.56, 23.24, 23.28, 23.36, 23.32, 23.28, 23.32, 23.28, 23.36, 23.4, 23.48, 23.32, 23.28] +420.46000000000004 +21.023000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 53.30746507644653, 'TIME_S_1KI': 53.30746507644653, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.7467108154294, 'W': 25.681407272182962, 'J_1KI': 1536.7467108154294, 'W_1KI': 25.681407272182962, 'W_D': 4.658407272182959, 'J_D': 278.7538851470942, 'W_D_1KI': 4.658407272182959, 'J_D_1KI': 4.658407272182959} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_helm2d03.json new file mode 100644 index 0000000..b96c66c --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_helm2d03.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 62.08789658546448, "TIME_S_1KI": 62.08789658546448, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1791.450892791748, "W": 25.109871656394702, "J_1KI": 1791.450892791748, "W_1KI": 25.109871656394702, "W_D": 4.4798716563947, "J_D": 319.61414173126184, "W_D_1KI": 4.4798716563947, "J_D_1KI": 4.4798716563947} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_helm2d03.output new file mode 100644 index 0000000..13ebb65 --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_helm2d03.output @@ -0,0 +1,51 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 
'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/helm2d03.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 62.08789658546448} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.2732, 0.1117, 0.4132, ..., 0.8859, 0.7833, 0.1406]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 62.08789658546448 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.2732, 0.1117, 0.4132, ..., 0.8859, 0.7833, 0.1406]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 62.08789658546448 seconds + +[23.16, 22.92, 22.92, 22.96, 22.84, 23.04, 23.12, 23.2, 23.36, 23.44] +[23.36, 23.32, 23.32, 24.2, 25.04, 27.48, 28.36, 28.84, 28.4, 27.24, 26.92, 26.76, 26.72, 26.8, 26.68, 26.6, 26.6, 26.56, 26.52, 26.56, 26.48, 26.48, 26.44, 26.6, 26.6, 26.56, 26.4, 26.48, 26.56, 26.36, 26.28, 26.2, 26.44, 26.6, 26.68, 27.2, 27.2, 27.04, 27.04, 26.84, 26.64, 26.64, 26.44, 26.4, 26.48, 26.48, 26.16, 26.52, 26.72, 26.8, 26.84, 26.84, 26.72, 26.64, 26.6, 26.8, 26.72, 26.8, 26.96, 27.0, 27.2, 27.32, 27.16, 26.88, 26.72, 26.68, 26.68, 26.68] +71.34448623657227 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 62.08789658546448, 'TIME_S_1KI': 62.08789658546448, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1791.450892791748, 'W': 25.109871656394702} +[23.16, 22.92, 22.92, 22.96, 22.84, 23.04, 23.12, 23.2, 23.36, 23.44, 23.08, 23.08, 23.04, 22.76, 22.48, 22.48, 22.48, 22.64, 22.96, 22.96] +412.6 +20.630000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 62.08789658546448, 'TIME_S_1KI': 62.08789658546448, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1791.450892791748, 'W': 25.109871656394702, 'J_1KI': 1791.450892791748, 'W_1KI': 25.109871656394702, 'W_D': 4.4798716563947, 'J_D': 319.61414173126184, 'W_D_1KI': 4.4798716563947, 'J_D_1KI': 4.4798716563947} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_language.json b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_language.json new file mode 100644 index 0000000..e4470f0 --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_language.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 32.617069482803345, "TIME_S_1KI": 32.617069482803345, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 877.9408810043335, "W": 25.358346526187013, "J_1KI": 877.9408810043335, "W_1KI": 25.358346526187013, "W_D": 4.655346526187017, "J_D": 161.17450821805022, "W_D_1KI": 4.655346526187017, "J_D_1KI": 4.655346526187017} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_language.output b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_language.output new file mode 100644 index 0000000..8ed6535 --- /dev/null +++ 
b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_language.output @@ -0,0 +1,49 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/language.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 32.617069482803345} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.1808, 0.0389, 0.7706, ..., 0.1715, 0.5157, 0.4224]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 32.617069482803345 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.1808, 0.0389, 0.7706, ..., 0.1715, 0.5157, 0.4224]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 32.617069482803345 seconds + +[23.16, 23.16, 22.96, 23.16, 22.92, 22.92, 23.0, 22.92, 22.84, 23.16] +[23.24, 23.4, 24.8, 25.6, 27.28, 28.0, 28.0, 28.48, 27.76, 27.96, 26.76, 26.64, 26.76, 26.88, 26.88, 26.96, 26.96, 27.2, 27.0, 27.12, 27.08, 26.68, 27.08, 27.16, 27.08, 27.08, 27.28, 27.0, 27.0, 27.24, 27.16, 27.36, 27.36] +34.62137722969055 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 32.617069482803345, 'TIME_S_1KI': 32.617069482803345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 877.9408810043335, 'W': 25.358346526187013} +[23.16, 23.16, 22.96, 23.16, 22.92, 22.92, 23.0, 22.92, 22.84, 23.16, 22.96, 23.28, 23.08, 23.16, 23.04, 22.88, 22.76, 22.88, 22.96, 23.0] +414.05999999999995 +20.702999999999996 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 32.617069482803345, 'TIME_S_1KI': 32.617069482803345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 877.9408810043335, 'W': 25.358346526187013, 'J_1KI': 877.9408810043335, 'W_1KI': 25.358346526187013, 'W_D': 4.655346526187017, 'J_D': 161.17450821805022, 'W_D_1KI': 4.655346526187017, 'J_D_1KI': 4.655346526187017} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_marine1.json b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_marine1.json new file mode 100644 index 0000000..91cb90d --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_marine1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 136.75263905525208, "TIME_S_1KI": 136.75263905525208, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3809.28440010071, "W": 25.30736544046405, "J_1KI": 3809.28440010071, "W_1KI": 25.30736544046405, "W_D": 4.417365440464049, "J_D": 664.905294132235, "W_D_1KI": 4.417365440464049, "J_D_1KI": 4.417365440464049} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_marine1.output b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_marine1.output new file mode 100644 index 0000000..5e3841d --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_marine1.output @@ -0,0 +1,51 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/marine1.mtx -c 1'] +{"MATRIX_TYPE": 
"SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 136.75263905525208} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.1033, 0.2543, 0.2854, ..., 0.8643, 0.3799, 0.0773]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 136.75263905525208 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.1033, 0.2543, 0.2854, ..., 0.8643, 0.3799, 0.0773]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 136.75263905525208 seconds + +[23.6, 23.2, 23.36, 23.44, 23.4, 23.52, 23.52, 23.4, 23.16, 22.92] +[23.12, 23.0, 23.0, 25.76, 27.64, 28.96, 29.28, 28.32, 27.36, 27.36, 27.0, 26.64, 26.6, 26.6, 26.76, 26.64, 26.6, 26.6, 26.4, 26.48, 26.56, 26.76, 26.96, 27.0, 27.04, 26.84, 26.92, 26.64, 26.64, 26.8, 26.88, 26.88, 26.88, 27.04, 27.12, 26.96, 26.96, 26.84, 26.6, 26.6, 26.24, 26.04, 26.0, 26.24, 26.48, 26.88, 27.2, 27.36, 27.16, 27.24, 27.16, 27.04, 27.04, 27.04, 26.88, 26.8, 26.48, 26.68, 26.64, 26.72, 26.68, 26.84, 26.6, 26.48, 26.36, 26.4, 26.28, 26.48, 26.6, 26.48, 26.48, 26.8, 26.76, 26.72, 26.76, 26.76, 26.68, 26.52, 26.48, 26.56, 26.6, 26.52, 26.28, 26.32, 26.24, 26.44, 26.52, 26.6, 26.44, 26.52, 26.56, 26.4, 26.56, 26.64, 26.92, 27.04, 26.92, 26.92, 26.96, 26.92, 26.84, 26.88, 27.16, 27.08, 26.96, 26.72, 26.56, 26.44, 26.4, 26.4, 26.64, 26.72, 26.84, 26.96, 27.2, 26.88, 26.8, 26.8, 26.92, 26.92, 27.08, 27.28, 27.2, 27.0, 26.92, 26.8, 26.64, 26.92, 26.72, 26.76, 26.8, 26.8, 26.76, 26.84, 26.88, 26.72, 26.6, 26.64, 26.52, 26.8, 26.96, 26.96, 27.16] +150.5207805633545 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 
'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 136.75263905525208, 'TIME_S_1KI': 136.75263905525208, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3809.28440010071, 'W': 25.30736544046405} +[23.6, 23.2, 23.36, 23.44, 23.4, 23.52, 23.52, 23.4, 23.16, 22.92, 23.28, 23.28, 23.28, 23.16, 23.08, 22.96, 22.84, 22.8, 22.92, 23.16] +417.8 +20.89 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 136.75263905525208, 'TIME_S_1KI': 136.75263905525208, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3809.28440010071, 'W': 25.30736544046405, 'J_1KI': 3809.28440010071, 'W_1KI': 25.30736544046405, 'W_D': 4.417365440464049, 'J_D': 664.905294132235, 'W_D_1KI': 4.417365440464049, 'J_D_1KI': 4.417365440464049} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_mario002.json b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_mario002.json new file mode 100644 index 0000000..931b93e --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_mario002.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 51.17483377456665, "TIME_S_1KI": 51.17483377456665, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1410.4799920654298, "W": 25.376735316064863, "J_1KI": 1410.4799920654298, "W_1KI": 25.376735316064863, "W_D": 4.522735316064864, "J_D": 251.3809437370302, "W_D_1KI": 4.522735316064864, "J_D_1KI": 4.522735316064864} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_mario002.output b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_mario002.output new file mode 100644 index 0000000..103b1d5 --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_mario002.output @@ -0,0 +1,49 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/mario002.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 51.17483377456665} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0328, 0.3187, 0.7172, ..., 0.3931, 0.0888, 0.1198]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 51.17483377456665 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0328, 0.3187, 0.7172, ..., 0.3931, 0.0888, 0.1198]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 51.17483377456665 seconds + +[23.16, 23.16, 23.16, 23.28, 23.28, 23.48, 23.44, 23.4, 23.44, 23.44] +[23.4, 23.48, 23.92, 24.84, 26.8, 27.6, 28.64, 28.08, 28.08, 26.88, 26.8, 26.8, 26.76, 26.76, 26.8, 26.64, 26.68, 26.48, 26.72, 26.96, 27.12, 27.2, 27.08, 27.12, 27.2, 27.12, 26.88, 27.04, 27.04, 27.2, 27.36, 27.24, 27.2, 27.2, 27.12, 27.12, 27.2, 26.92, 26.88, 26.88, 26.92, 26.84, 26.6, 26.84, 26.68, 26.72, 26.96, 26.72, 26.64, 26.84, 26.76, 27.0, 27.2] +55.58161735534668 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 51.17483377456665, 'TIME_S_1KI': 51.17483377456665, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1410.4799920654298, 'W': 25.376735316064863} +[23.16, 23.16, 23.16, 23.28, 23.28, 23.48, 23.44, 23.4, 23.44, 23.44, 23.08, 22.96, 22.96, 23.04, 23.08, 22.96, 23.04, 23.12, 22.88, 23.12] +417.08 +20.854 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 51.17483377456665, 'TIME_S_1KI': 51.17483377456665, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1410.4799920654298, 'W': 25.376735316064863, 'J_1KI': 1410.4799920654298, 'W_1KI': 25.376735316064863, 'W_D': 4.522735316064864, 'J_D': 251.3809437370302, 'W_D_1KI': 4.522735316064864, 'J_D_1KI': 4.522735316064864} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_test1.json b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_test1.json new file mode 100644 index 0000000..4bb27e2 --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_test1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, 
"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 283.81978273391724, "TIME_S_1KI": 283.81978273391724, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 7375.577878112791, "W": 25.373727151984518, "J_1KI": 7375.577878112791, "W_1KI": 25.373727151984518, "W_D": 4.492727151984518, "J_D": 1305.9358129017337, "W_D_1KI": 4.492727151984518, "J_D_1KI": 4.492727151984518} diff --git a/pytorch/output_389000+_1core/altra_1_csr_10_10_10_test1.output b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_test1.output new file mode 100644 index 0000000..c57d8f0 --- /dev/null +++ b/pytorch/output_389000+_1core/altra_1_csr_10_10_10_test1.output @@ -0,0 +1,51 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/test1.mtx -c 1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 283.81978273391724} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.4069, 0.7437, 0.0499, ..., 0.6858, 0.0232, 0.7224]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 283.81978273391724 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+ 12968191, 12968200]),
+ col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+ 392907]),
+ values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+ 0.0000e+00, 0.0000e+00, 2.1156e-17]),
+ size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.4069, 0.7437, 0.0499, ..., 0.6858, 0.0232, 0.7224])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 283.81978273391724 seconds
+
+[22.72, 22.6, 22.72, 22.52, 22.8, 23.2, 23.32, 23.36, 23.36, 23.36]
+[23.36, 23.0, 23.52, 26.52, 28.0, 29.08, 29.6, 28.4, 27.72, 27.56, 27.4, 26.84, 27.08, 27.28, 27.24, 27.04, 26.76, 26.76, 26.68, 26.48, 26.44, 26.48, 26.36, 26.2, 26.48, 26.48, 26.64, 26.64, 26.68, 26.52, 26.52, 26.68, 26.64, 26.68, 26.68, 26.68, 26.64, 26.64, 26.64, 26.92, 26.84, 26.6, 26.72, 26.68, 26.44, 26.76, 26.96, 26.8, 26.8, 26.96, 27.0, 26.92, 26.96, 26.96, 26.64, 26.52, 26.36, 26.56, 26.6, 26.72, 26.72, 26.72, 26.64, 26.6, 26.52, 26.64, 26.52, 26.48, 26.44, 26.44, 26.48, 26.56, 26.76, 26.64, 26.76, 26.72, 26.76, 26.8, 26.76, 26.64, 26.44, 26.44, 26.44, 26.44, 26.64, 26.56, 26.76, 26.76, 26.68, 26.68, 26.72, 26.72, 26.8, 26.8, 26.64, 26.4, 26.52, 26.56, 26.68, 26.68, 26.96, 26.72, 26.64, 26.76, 26.76, 26.84, 26.76, 26.96, 26.8, 26.76, 26.68, 26.72, 26.8, 27.12, 27.08, 26.92, 26.68, 26.28, 26.2, 26.44, 26.4, 26.68, 26.88, 26.92, 27.08, 26.96, 26.96, 26.88, 26.84, 26.68, 26.64, 26.68, 26.52, 26.48, 26.28, 26.28, 26.24, 26.44, 26.76, 27.04, 26.92, 27.0, 27.0, 27.0, 26.84, 26.92, 26.88, 26.92, 26.92, 26.8, 26.8, 26.84, 26.92, 26.92, 27.08, 26.92, 27.0, 26.92, 26.8, 26.8, 26.96, 26.84, 26.92, 26.84, 26.68, 26.6, 26.68, 26.72, 26.96, 26.76, 26.76, 26.6, 26.4, 26.28, 26.2, 26.44, 26.8, 26.76, 26.92, 27.04, 26.8, 26.6, 26.6, 26.32, 26.24, 26.4, 26.28, 26.36, 26.64, 26.64, 26.6, 26.68, 26.68, 26.44, 26.24, 26.12, 26.36, 26.44, 26.68, 26.8, 26.92, 26.96, 26.92, 26.88, 26.88, 26.76, 26.76, 26.8, 26.68, 26.72, 26.64, 26.52, 26.64, 26.92, 26.92, 26.76, 26.6, 26.6, 26.56, 26.36, 26.36, 26.64, 26.6, 26.72, 26.76, 26.84, 26.72, 27.04, 26.72, 26.6, 26.88, 26.76, 26.72, 26.64, 26.8, 27.04, 27.04, 26.96, 27.04, 27.04, 26.8, 26.68, 26.88, 26.84, 27.0, 26.92, 26.96, 27.0, 26.76, 26.8, 26.88, 26.88, 26.96, 27.0, 27.12, 27.32, 27.32, 27.32, 27.32, 27.24, 26.88, 26.6, 26.68, 26.64, 26.92, 27.08, 27.28, 27.04, 26.92, 27.08, 27.4, 27.56, 27.6, 27.48, 27.2, 26.88]
+290.67774844169617
+{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 283.81978273391724, 'TIME_S_1KI': 283.81978273391724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 7375.577878112791, 'W': 25.373727151984518}
+[22.72, 22.6, 22.72, 22.52, 22.8, 23.2, 23.32, 23.36, 23.36, 23.36, 23.48, 23.28, 23.2, 23.24, 23.24, 23.32, 23.6, 23.8, 23.52, 23.52]
+417.62
+20.881
+{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 283.81978273391724, 'TIME_S_1KI': 283.81978273391724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 7375.577878112791, 'W': 25.373727151984518, 'J_1KI': 7375.577878112791, 'W_1KI': 25.373727151984518, 'W_D': 4.492727151984518, 'J_D': 1305.9358129017337, 'W_D_1KI': 4.492727151984518, 'J_D_1KI': 4.492727151984518}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_amazon0312.json
new file mode 100644
index 0000000..4f2ba5e
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_amazon0312.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1665, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.677687168121338, "TIME_S_1KI": 6.413025326199002, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 984.0334419322014, "W": 72.51, "J_1KI": 591.0110762355564, "W_1KI": 43.54954954954955, "W_D": 37.19775000000001, "J_D": 504.81078423160335, "W_D_1KI": 22.340990990991, "J_D_1KI": 13.418012607201801}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_amazon0312.output
new file mode 100644
index 0000000..78545de
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_amazon0312.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 6.305053472518921}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+ 3200438, 3200440]),
+ col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+ 400707]),
+ values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+ nnz=3200440, layout=torch.sparse_csr)
+tensor([0.6406, 0.1468, 0.7280, ..., 0.0181, 0.2043, 0.6040])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 6.305053472518921 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1665', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.677687168121338}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+ 3200438, 3200440]),
+ col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+ 400707]),
+ values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+ nnz=3200440, layout=torch.sparse_csr)
+tensor([0.7736, 0.6337, 0.2989, ..., 0.0625, 0.4089, 0.3233])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 10.677687168121338 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+ 3200438, 3200440]),
+ col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+ 400707]),
+ values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+ nnz=3200440, layout=torch.sparse_csr)
+tensor([0.7736, 0.6337, 0.2989, ..., 0.0625, 0.4089, 0.3233])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 10.677687168121338 seconds
+
+[40.11, 38.6, 39.5, 38.36, 38.75, 38.98, 39.85, 38.73, 38.93, 38.39]
+[72.51]
+13.571003198623657
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1665, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.677687168121338, 'TIME_S_1KI': 6.413025326199002, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 984.0334419322014, 'W': 72.51}
+[40.11, 38.6, 39.5, 38.36, 38.75, 38.98, 39.85, 38.73, 38.93, 38.39, 39.83, 38.29, 40.69, 40.97, 41.34, 39.2, 38.48, 38.73, 38.45, 38.46]
+706.2449999999999
+35.31224999999999
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1665, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.677687168121338, 'TIME_S_1KI': 6.413025326199002, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 984.0334419322014, 'W': 72.51, 'J_1KI': 591.0110762355564, 'W_1KI': 43.54954954954955, 'W_D': 37.19775000000001, 'J_D': 504.81078423160335, 'W_D_1KI': 22.340990990991, 'J_D_1KI': 13.418012607201801}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_darcy003.json b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_darcy003.json
new file mode 100644
index 0000000..0c5532e
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_darcy003.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2770, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.539327144622803, "TIME_S_1KI": 3.804811243546138, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 940.3520177507401, "W": 70.14, "J_1KI": 339.477262725899, "W_1KI": 25.32129963898917, "W_D": 35.342749999999995, "J_D": 473.8327099424004, "W_D_1KI": 12.759115523465702, "J_D_1KI": 4.606178889337799}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_darcy003.output b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_darcy003.output
new file mode 100644
index 0000000..0191954
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_darcy003.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 3.790585994720459}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.8724, 0.5946, 0.8360, ..., 0.1630, 0.5271, 0.0708])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 3.790585994720459 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '2770', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.539327144622803}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.4798, 0.6348, 0.4010, ..., 0.9410, 0.2128, 0.7861])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.539327144622803 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.4798, 0.6348, 0.4010, ..., 0.9410, 0.2128, 0.7861])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.539327144622803 seconds
+
+[39.64, 38.41, 38.51, 38.38, 38.48, 38.55, 38.6, 38.35, 39.22, 38.54]
+[70.14]
+13.406786680221558
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2770, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.539327144622803, 'TIME_S_1KI': 3.804811243546138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.3520177507401, 'W': 70.14}
+[39.64, 38.41, 38.51, 38.38, 38.48, 38.55, 38.6, 38.35, 39.22, 38.54, 39.52, 38.45, 38.63, 38.39, 38.55, 38.9, 38.88, 38.72, 38.83, 38.49]
+695.945
+34.797250000000005
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2770, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.539327144622803, 'TIME_S_1KI': 3.804811243546138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.3520177507401, 'W': 70.14, 'J_1KI': 339.477262725899, 'W_1KI': 25.32129963898917, 'W_D': 35.342749999999995, 'J_D': 473.8327099424004, 'W_D_1KI': 12.759115523465702, 'J_D_1KI': 4.606178889337799}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_helm2d03.json
new file mode 100644
index 0000000..6c16e41
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_helm2d03.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2881, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.489487886428833, "TIME_S_1KI": 3.640919085882969, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 982.9177148509026, "W": 72.89, "J_1KI": 341.1724105695601, "W_1KI": 25.30024297119056, "W_D": 38.247499999999995, "J_D": 515.765472612977, "W_D_1KI": 13.275772301284276, "J_D_1KI": 4.6080431451871835}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_helm2d03.output
new file mode 100644
index 0000000..3aa1656
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_helm2d03.output
@@ -0,0 +1,74 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 3.64394474029541}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+ 2741928, 2741935]),
+ col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+ 392256]),
+ values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+ 3.5476]), size=(392257, 392257), nnz=2741935,
+ layout=torch.sparse_csr)
+tensor([0.2874, 0.3706, 0.3465, ..., 0.0468, 0.1058, 0.2863])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 3.64394474029541 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '2881', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.489487886428833}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+ 2741928, 2741935]),
+ col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+ 392256]),
+ values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+ 3.5476]), size=(392257, 392257), nnz=2741935,
+ layout=torch.sparse_csr)
+tensor([0.4950, 0.0177, 0.5787, ..., 0.9424, 0.3532, 0.5521])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 10.489487886428833 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+ 2741928, 2741935]),
+ col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+ 392256]),
+ values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+ 3.5476]), size=(392257, 392257), nnz=2741935,
+ layout=torch.sparse_csr)
+tensor([0.4950, 0.0177, 0.5787, ..., 0.9424, 0.3532, 0.5521])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 10.489487886428833 seconds
+
+[39.75, 38.21, 38.34, 38.21, 38.27, 38.42, 38.28, 38.17, 38.78, 38.77]
+[72.89]
+13.484946012496948
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2881, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.489487886428833, 'TIME_S_1KI': 3.640919085882969, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 982.9177148509026, 'W': 72.89}
+[39.75, 38.21, 38.34, 38.21, 38.27, 38.42, 38.28, 38.17, 38.78, 38.77, 39.12, 38.21, 38.45, 38.24, 39.01, 38.22, 38.93, 38.26, 38.73, 38.6]
+692.8500000000001
+34.642500000000005
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2881, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.489487886428833, 'TIME_S_1KI': 3.640919085882969, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 982.9177148509026, 'W': 72.89, 'J_1KI': 341.1724105695601, 'W_1KI': 25.30024297119056, 'W_D': 38.247499999999995, 'J_D': 515.765472612977, 'W_D_1KI': 13.275772301284276, 'J_D_1KI': 4.6080431451871835}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_language.json b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_language.json
new file mode 100644
index 0000000..d92fabc
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_language.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3433, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.349842309951782, "TIME_S_1KI": 3.01480987764398, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 866.6259407162667, "W": 65.83, "J_1KI": 252.43983126020004, "W_1KI": 19.175648121176813, "W_D": 31.058999999999997, "J_D": 408.8794636595249, "W_D_1KI": 9.047189047480337, "J_D_1KI": 2.635359466204584}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_language.output b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_language.output
new file mode 100644
index 0000000..994626a
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_language.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/language.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 3.058311939239502}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+ 1216332, 1216334]),
+ col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+ 399129]),
+ values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+ size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.8774, 0.7244, 0.5547, ..., 0.2046, 0.1297, 0.0114])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 3.058311939239502 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '3433', '-m', 'matrices/389000+_cols/language.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.349842309951782}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+ 1216332, 1216334]),
+ col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+ 399129]),
+ values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+ size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.8074, 0.3012, 0.9549, ..., 0.2881, 0.3396, 0.4512])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 10.349842309951782 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+ 1216332, 1216334]),
+ col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+ 399129]),
+ values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+ size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.8074, 0.3012, 0.9549, ..., 0.2881, 0.3396, 0.4512])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 10.349842309951782 seconds
+
+[39.97, 38.36, 38.86, 38.59, 38.26, 38.09, 38.21, 38.92, 38.29, 38.47]
+[65.83]
+13.164604902267456
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3433, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.349842309951782, 'TIME_S_1KI': 3.01480987764398, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 866.6259407162667, 'W': 65.83}
+[39.97, 38.36, 38.86, 38.59, 38.26, 38.09, 38.21, 38.92, 38.29, 38.47, 39.36, 38.28, 38.75, 39.51, 38.42, 38.7, 38.49, 38.22, 39.26, 38.62]
+695.4200000000001
+34.771
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3433, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.349842309951782, 'TIME_S_1KI': 3.01480987764398, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 866.6259407162667, 'W': 65.83, 'J_1KI': 252.43983126020004, 'W_1KI': 19.175648121176813, 'W_D': 31.058999999999997, 'J_D': 408.8794636595249, 'W_D_1KI': 9.047189047480337, 'J_D_1KI': 2.635359466204584}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_marine1.json b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_marine1.json
new file mode 100644
index 0000000..7b79af7
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_marine1.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1403, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.47270679473877, "TIME_S_1KI": 7.4645094759364, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1076.08722530365, "W": 76.53000000000002, "J_1KI": 766.9901819698147, "W_1KI": 54.54739843193158, "W_D": 40.18550000000002, "J_D": 565.0477354297641, "W_D_1KI": 28.642551674982194, "J_D_1KI": 20.415218585161934}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_marine1.output b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_marine1.output
new file mode 100644
index 0000000..f00e48c
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_marine1.output
@@ -0,0 +1,74 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 7.480671405792236}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+ 6226531, 6226538]),
+ col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+ 400319]),
+ values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+ -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+ size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.5771, 0.6006, 0.1014, ..., 0.3420, 0.9665, 0.9706])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 7.480671405792236 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1403', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.47270679473877}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+ 6226531, 6226538]),
+ col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+ 400319]),
+ values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+ -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+ size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.9737, 0.1599, 0.8628, ..., 0.5469, 0.5754, 0.2289])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 10.47270679473877 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+ 6226531, 6226538]),
+ col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+ 400319]),
+ values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+ -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+ size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.9737, 0.1599, 0.8628, ..., 0.5469, 0.5754, 0.2289])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 10.47270679473877 seconds
+
+[39.8, 38.66, 38.76, 38.93, 38.51, 38.5, 38.48, 53.83, 45.0, 38.92]
+[76.53]
+14.060985565185547
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1403, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.47270679473877, 'TIME_S_1KI': 7.4645094759364, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1076.08722530365, 'W': 76.53000000000002}
+[39.8, 38.66, 38.76, 38.93, 38.51, 38.5, 38.48, 53.83, 45.0, 38.92, 39.54, 38.42, 38.88, 38.56, 45.29, 38.95, 39.28, 38.53, 39.92, 38.52]
+726.89
+36.3445
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1403, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.47270679473877, 'TIME_S_1KI': 7.4645094759364, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1076.08722530365, 'W': 76.53000000000002, 'J_1KI': 766.9901819698147, 'W_1KI': 54.54739843193158, 'W_D': 40.18550000000002, 'J_D': 565.0477354297641, 'W_D_1KI': 28.642551674982194, 'J_D_1KI': 20.415218585161934}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_mario002.json b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_mario002.json
new file mode 100644
index 0000000..8c1f364
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_mario002.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2778, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.518349885940552, "TIME_S_1KI": 3.786303054694223, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 942.0412521362306, "W": 70.4, "J_1KI": 339.1077221512709, "W_1KI": 25.341972642188626, "W_D": 35.57825000000001, "J_D": 476.0820906081797, "W_D_1KI": 12.807145428365734, "J_D_1KI": 4.61020353792863}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_mario002.output b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_mario002.output
new file mode 100644
index 0000000..c39c36c
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_mario002.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 3.778977870941162}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.3385, 0.4156, 0.4762, ..., 0.6246, 0.7256, 0.2909])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 3.778977870941162 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '2778', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.518349885940552}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.1706, 0.4199, 0.8169, ..., 0.9237, 0.2859, 0.4340])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.518349885940552 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.1706, 0.4199, 0.8169, ..., 0.9237, 0.2859, 0.4340])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.518349885940552 seconds
+
+[39.45, 38.78, 38.96, 38.79, 38.45, 38.37, 38.35, 39.22, 38.56, 38.56]
+[70.4]
+13.381267786026001
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2778, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.518349885940552, 'TIME_S_1KI': 3.786303054694223, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 942.0412521362306, 'W': 70.4}
+[39.45, 38.78, 38.96, 38.79, 38.45, 38.37, 38.35, 39.22, 38.56, 38.56, 40.19, 38.52, 38.44, 38.38, 38.77, 38.32, 38.58, 39.02, 38.41, 38.83]
+696.435
+34.821749999999994
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2778, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.518349885940552, 'TIME_S_1KI': 3.786303054694223, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 942.0412521362306, 'W': 70.4, 'J_1KI': 339.1077221512709, 'W_1KI': 25.341972642188626, 'W_D': 35.57825000000001, 'J_D': 476.0820906081797, 'W_D_1KI': 12.807145428365734, 'J_D_1KI': 4.61020353792863}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_test1.json b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_test1.json
new file mode 100644
index 0000000..48065b0
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_test1.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 20.74174404144287, "TIME_S_1KI": 20.74174404144287, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1964.3499762821195, "W": 78.38, "J_1KI": 1964.3499762821195, "W_1KI": 78.38, "W_D": 43.477, "J_D": 1089.6152579588888, "W_D_1KI": 43.477, "J_D_1KI": 43.477}
diff --git a/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_test1.output b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_test1.output
new file mode 100644
index 0000000..7a3f429
--- /dev/null
+++ b/pytorch/output_389000+_1core/epyc_7313p_1_csr_10_10_10_test1.output
@@ -0,0 +1,51 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/test1.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 20.74174404144287}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+ 12968191, 12968200]),
+ col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+ 392907]),
+ values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+ 0.0000e+00, 0.0000e+00, 2.1156e-17]),
+ size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.3044, 0.7914, 0.5459, ..., 0.2990, 0.3126, 0.6970])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 20.74174404144287 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+ 12968191, 12968200]),
+ col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+ 392907]),
+ values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+ 0.0000e+00, 0.0000e+00, 2.1156e-17]),
+ size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.3044, 0.7914, 0.5459, ..., 0.2990, 0.3126, 0.6970])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 20.74174404144287 seconds
+
+[39.18, 39.6, 38.95, 39.49, 38.43, 39.82, 38.88, 38.36, 38.96, 38.49]
+[78.38]
+25.061877727508545
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 20.74174404144287, 'TIME_S_1KI': 20.74174404144287, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1964.3499762821195, 'W': 78.38}
+[39.18, 39.6, 38.95, 39.49, 38.43, 39.82, 38.88, 38.36, 38.96, 38.49, 39.91, 38.51, 38.34, 38.55, 38.51, 38.42, 38.42, 38.4, 38.39, 38.48]
+698.06
+34.903
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 20.74174404144287, 'TIME_S_1KI': 20.74174404144287, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1964.3499762821195, 'W': 78.38, 'J_1KI': 1964.3499762821195, 'W_1KI': 78.38, 'W_D': 43.477, 'J_D': 1089.6152579588888, 'W_D_1KI': 43.477, 'J_D_1KI': 43.477}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_amazon0312.json
new file mode 100644
index 0000000..d26e6d4
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_amazon0312.json
@@ -0,0 +1 @@
+{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 12.424208641052246, "TIME_S_1KI": 12.424208641052246, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 879.7217792987824, "W": 52.38, "J_1KI": 879.7217792987824, "W_1KI": 52.38, "W_D": 36.156000000000006, "J_D": 607.2397986316682, "W_D_1KI": 36.156000000000006, "J_D_1KI": 36.156000000000006}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_amazon0312.output
new file mode 100644
index 0000000..3f99e16
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_amazon0312.output
@@ -0,0 +1,49 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/amazon0312.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 12.424208641052246}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+ 3200438, 3200440]),
+ col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+ 400707]),
+ values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+ nnz=3200440, layout=torch.sparse_csr)
+tensor([0.6069, 0.8095, 0.9925, ..., 0.5957, 0.3239, 0.4137])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 12.424208641052246 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+ 3200438, 3200440]),
+ col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+ 400707]),
+ values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+ nnz=3200440, layout=torch.sparse_csr)
+tensor([0.6069, 0.8095, 0.9925, ..., 0.5957, 0.3239, 0.4137])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 12.424208641052246 seconds
+
+[18.89, 17.73, 17.96, 17.88, 17.85, 17.69, 17.97, 17.83, 17.93, 18.01]
+[52.38]
+16.79499387741089
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 12.424208641052246, 'TIME_S_1KI': 12.424208641052246, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 879.7217792987824, 'W': 52.38}
+[18.89, 17.73, 17.96, 17.88, 17.85, 17.69, 17.97, 17.83, 17.93, 18.01, 18.12, 18.25, 18.06, 18.22, 18.08, 17.89, 17.8, 17.75, 19.11, 17.94]
+324.48
+16.224
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 12.424208641052246, 'TIME_S_1KI': 12.424208641052246, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 879.7217792987824, 'W': 52.38, 'J_1KI': 879.7217792987824, 'W_1KI': 52.38, 'W_D': 36.156000000000006, 'J_D': 607.2397986316682, 'W_D_1KI': 36.156000000000006, 'J_D_1KI': 36.156000000000006}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_darcy003.json b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_darcy003.json
new file mode 100644
index 0000000..06cf5ab
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_darcy003.json
@@ -0,0 +1 @@
+{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1604, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.463838577270508, "TIME_S_1KI": 6.5235901354554295, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 792.353337585926, "W": 53.79, "J_1KI": 493.98587131292146, "W_1KI": 33.53491271820449, "W_D": 15.963500000000003, "J_D": 235.15026035606866, "W_D_1KI": 9.952306733167084, "J_D_1KI": 6.204680008208905}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_darcy003.output b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_darcy003.output
new file mode 100644
index 0000000..66e352a
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_darcy003.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 6.544304132461548}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.9984, 0.0550, 0.4152, ..., 0.8933, 0.3177, 0.3432])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 6.544304132461548 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1604', '-m', 'matrices/389000+_cols/darcy003.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.463838577270508}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.3553, 0.0914, 0.5617, ..., 0.2172, 0.2068, 0.5865])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.463838577270508 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.3553, 0.0914, 0.5617, ..., 0.2172, 0.2068, 0.5865])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.463838577270508 seconds
+
+[51.54, 47.98, 24.21, 22.55, 39.25, 40.93, 45.39, 44.37, 40.99, 42.3]
+[53.79]
+14.73049521446228
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1604, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.463838577270508, 'TIME_S_1KI': 6.5235901354554295, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.353337585926, 'W': 53.79}
+[51.54, 47.98, 24.21, 22.55, 39.25, 40.93, 45.39, 44.37, 40.99, 42.3, 51.25, 53.2, 44.13, 44.41, 42.29, 42.29, 43.1, 44.05, 43.73, 42.23]
+756.53
+37.826499999999996
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1604, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.463838577270508, 'TIME_S_1KI': 6.5235901354554295, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.353337585926, 'W': 53.79, 'J_1KI': 493.98587131292146, 'W_1KI': 33.53491271820449, 'W_D': 15.963500000000003, 'J_D': 235.15026035606866, 'W_D_1KI': 9.952306733167084, 'J_D_1KI': 6.204680008208905}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_helm2d03.json
new file mode 100644
index 0000000..2466ba0
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_helm2d03.json
@@ -0,0 +1 @@
+{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1567, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.482280731201172, "TIME_S_1KI": 6.6893942126363575, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 809.832340335846, "W": 53.88, "J_1KI": 516.8043014268321, "W_1KI": 34.38417358008934, "W_D": 37.52475, "J_D": 564.0080941540002, "W_D_1KI": 23.94687300574346, "J_D_1KI": 15.2819866022613}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_helm2d03.output
new file mode 100644
index 0000000..a8b8852
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_helm2d03.output
@@ -0,0 +1,74 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 6.698031663894653}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+ 2741928, 2741935]),
+ col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+ 392256]),
+ values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+ 3.5476]), size=(392257, 392257), nnz=2741935,
+ layout=torch.sparse_csr)
+tensor([0.6880, 0.8256, 0.6674, ..., 0.8572, 0.2017, 0.9423])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 6.698031663894653 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1567', '-m', 'matrices/389000+_cols/helm2d03.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.482280731201172}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+ 2741928, 2741935]),
+ col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+ 392256]),
+ values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+ 3.5476]), size=(392257, 392257), nnz=2741935,
+ layout=torch.sparse_csr)
+tensor([0.9695, 0.5429, 0.1111, ..., 0.2474, 0.2323, 0.6789])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 10.482280731201172 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.9695, 0.5429, 0.1111, ..., 0.2474, 0.2323, 0.6789]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.482280731201172 seconds + +[18.37, 18.1, 22.31, 17.76, 18.12, 17.9, 17.88, 17.63, 17.79, 17.9] +[53.88] +15.030295848846436 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1567, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.482280731201172, 'TIME_S_1KI': 6.6893942126363575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 809.832340335846, 'W': 53.88} +[18.37, 18.1, 22.31, 17.76, 18.12, 17.9, 17.88, 17.63, 17.79, 17.9, 18.29, 18.08, 17.89, 18.0, 17.96, 17.67, 18.29, 17.73, 17.87, 17.69] +327.105 +16.35525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1567, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.482280731201172, 'TIME_S_1KI': 6.6893942126363575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 809.832340335846, 'W': 53.88, 'J_1KI': 516.8043014268321, 'W_1KI': 34.38417358008934, 'W_D': 37.52475, 'J_D': 564.0080941540002, 'W_D_1KI': 23.94687300574346, 'J_D_1KI': 15.2819866022613} diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_language.json b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_language.json new file mode 100644 index 0000000..9d30cd5 --- /dev/null +++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_language.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1711, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.39021348953247, "TIME_S_1KI": 6.072597013169182, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 758.5677815818786, "W": 52.04, "J_1KI": 443.34762219864325, "W_1KI": 30.414962010520163, "W_D": 35.8595, "J_D": 522.710633428812, "W_D_1KI": 20.95821157218001, "J_D_1KI": 12.249100860420812} diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_language.output b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_language.output new file mode 100644 index 0000000..7a78a51 --- /dev/null +++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_language.output @@ -0,0 +1,71 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/language.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, 
"MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 6.135177135467529} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.0547, 0.0947, 0.9321, ..., 0.9094, 0.0107, 0.8738]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 6.135177135467529 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1711', '-m', 'matrices/389000+_cols/language.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.39021348953247} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.6001, 0.7097, 0.4908, ..., 0.7271, 0.7976, 0.2970]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.39021348953247 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.6001, 0.7097, 0.4908, ..., 0.7271, 0.7976, 0.2970]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.39021348953247 seconds + +[18.03, 17.94, 17.82, 17.99, 17.74, 17.56, 17.76, 17.68, 17.79, 18.07] +[52.04] +14.576629161834717 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1711, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.39021348953247, 'TIME_S_1KI': 6.072597013169182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.5677815818786, 'W': 52.04} +[18.03, 17.94, 17.82, 17.99, 17.74, 17.56, 17.76, 17.68, 17.79, 18.07, 18.72, 17.93, 17.72, 18.0, 18.39, 17.97, 18.72, 17.86, 18.44, 17.78] +323.60999999999996 +16.1805 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1711, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.39021348953247, 'TIME_S_1KI': 6.072597013169182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.5677815818786, 'W': 52.04, 'J_1KI': 443.34762219864325, 'W_1KI': 30.414962010520163, 'W_D': 35.8595, 'J_D': 522.710633428812, 'W_D_1KI': 20.95821157218001, 'J_D_1KI': 12.249100860420812} diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_marine1.json b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_marine1.json new file mode 100644 index 0000000..bc13ead --- /dev/null +++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_marine1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 14.3206205368042, "TIME_S_1KI": 14.3206205368042, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1077.2933460760116, "W": 53.62, "J_1KI": 1077.2933460760116, "W_1KI": 53.62, "W_D": 37.37875, "J_D": 750.9861741819977, "W_D_1KI": 37.37875, "J_D_1KI": 37.37875} diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_marine1.output b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_marine1.output new file mode 100644 index 0000000..ba23280 --- /dev/null +++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_marine1.output @@ -0,0 +1,51 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/marine1.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 14.3206205368042} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+ 6226531, 6226538]),
+ col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+ 400319]),
+ values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+ -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+ size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.4405, 0.4136, 0.9296, ..., 0.1477, 0.1453, 0.8762])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 14.3206205368042 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+ 6226531, 6226538]),
+ col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+ 400319]),
+ values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+ -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+ size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.4405, 0.4136, 0.9296, ..., 0.1477, 0.1453, 0.8762])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 14.3206205368042 seconds
+
+[18.25, 17.59, 17.7, 17.8, 18.1, 17.82, 18.62, 21.23, 18.03, 17.76]
+[53.62]
+20.091259717941284
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 14.3206205368042, 'TIME_S_1KI': 14.3206205368042, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1077.2933460760116, 'W': 53.62}
+[18.25, 17.59, 17.7, 17.8, 18.1, 17.82, 18.62, 21.23, 18.03, 17.76, 18.27, 17.65, 17.53, 17.72, 17.84, 17.65, 17.87, 17.77, 17.96, 17.61]
+324.825
+16.24125
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 14.3206205368042, 'TIME_S_1KI': 14.3206205368042, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1077.2933460760116, 'W': 53.62, 'J_1KI': 1077.2933460760116, 'W_1KI': 53.62, 'W_D': 37.37875, 'J_D': 750.9861741819977, 'W_D_1KI': 37.37875, 'J_D_1KI': 37.37875}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_mario002.json b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_mario002.json
new file mode 100644
index 0000000..f7e4602
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_mario002.json
@@ -0,0 +1 @@
+{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1598, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.48720407485962, "TIME_S_1KI": 6.562705929198761, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 768.8178126811981, "W": 53.0, "J_1KI": 481.1125235802241, "W_1KI": 33.16645807259074, "W_D": 36.777, "J_D": 533.4870320184231, "W_D_1KI": 23.01439299123905, "J_D_1KI": 14.40199811717087}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_mario002.output b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_mario002.output
new file mode 100644
index 0000000..323fb21
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_mario002.output
@@ -0,0 +1,71 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 6.568377256393433}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.1410, 0.8504, 0.4141, ..., 0.6370, 0.5152, 0.1646])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 6.568377256393433 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1598', '-m', 'matrices/389000+_cols/mario002.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.48720407485962}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.9888, 0.3844, 0.2800, ..., 0.8268, 0.5179, 0.1169])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.48720407485962 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.9888, 0.3844, 0.2800, ..., 0.8268, 0.5179, 0.1169])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 10.48720407485962 seconds
+
+[18.78, 17.92, 18.04, 18.03, 17.82, 17.72, 18.29, 17.74, 17.92, 17.72]
+[53.0]
+14.505996465682983
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1598, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.48720407485962, 'TIME_S_1KI': 6.562705929198761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 768.8178126811981, 'W': 53.0}
+[18.78, 17.92, 18.04, 18.03, 17.82, 17.72, 18.29, 17.74, 17.92, 17.72, 18.42, 18.18, 17.97, 17.99, 17.82, 17.72, 17.94, 18.12, 18.4, 18.76]
+324.4599999999999
+16.222999999999995
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1598, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.48720407485962, 'TIME_S_1KI': 6.562705929198761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 768.8178126811981, 'W': 53.0, 'J_1KI': 481.1125235802241, 'W_1KI': 33.16645807259074, 'W_D': 36.777, 'J_D': 533.4870320184231, 'W_D_1KI': 23.01439299123905, 'J_D_1KI': 14.40199811717087}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_test1.json b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_test1.json
new file mode 100644
index 0000000..54da32a
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_test1.json
@@ -0,0 +1 @@
+{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 37.58896088600159, "TIME_S_1KI": 37.58896088600159, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2465.35528427124, "W": 53.12, "J_1KI": 2465.35528427124, "W_1KI": 53.12, "W_D": 36.98599999999999, "J_D": 1716.5593099408145, "W_D_1KI": 36.98599999999999, "J_D_1KI": 36.98599999999999}
diff --git a/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_test1.output b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_test1.output
new file mode 100644
index 0000000..7c7841e
--- /dev/null
+++ b/pytorch/output_389000+_1core/xeon_4216_1_csr_10_10_10_test1.output
@@ -0,0 +1,51 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/test1.mtx', '-c', '1']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 37.58896088600159}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+ 12968191, 12968200]),
+ col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+ 392907]),
+ values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+ 0.0000e+00, 0.0000e+00, 2.1156e-17]),
+ size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.9581, 0.4048, 0.0262, ..., 0.9819, 0.7450, 0.5527])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 37.58896088600159 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+ 12968191, 12968200]),
+ col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+ 392907]),
+ values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+ 0.0000e+00, 0.0000e+00, 2.1156e-17]),
+ size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.9581, 0.4048, 0.0262, ..., 0.9819, 0.7450, 0.5527])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 37.58896088600159 seconds
+
+[18.09, 17.76, 17.72, 17.96, 17.74, 17.95, 17.93, 17.84, 17.96, 18.58]
+[53.12]
+46.41105580329895
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 37.58896088600159, 'TIME_S_1KI': 37.58896088600159, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2465.35528427124, 'W': 53.12}
+[18.09, 17.76, 17.72, 17.96, 17.74, 17.95, 17.93, 17.84, 17.96, 18.58, 18.32, 17.63, 17.7, 18.87, 17.96, 17.88, 17.96, 17.6, 17.83, 17.79]
+322.68000000000006
+16.134000000000004
+{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 37.58896088600159, 'TIME_S_1KI': 37.58896088600159, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2465.35528427124, 'W': 53.12, 'J_1KI': 2465.35528427124, 'W_1KI': 53.12, 'W_D': 36.98599999999999, 'J_D': 1716.5593099408145, 'W_D_1KI': 36.98599999999999, 'J_D_1KI': 36.98599999999999}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_amazon0312.json
new file mode 100644
index 0000000..5fe67db
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_amazon0312.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 30.323144912719727, "TIME_S_1KI": 30.323144912719727, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2708.072883968354, "W": 77.40203729318671, "J_1KI": 2708.072883968354, "W_1KI": 77.40203729318671, "W_D": 53.61603729318671, "J_D": 1875.8697033972746, "W_D_1KI": 53.61603729318671, "J_D_1KI": 53.61603729318671}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_amazon0312.output
new file mode 100644
index 0000000..c77cfff
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_amazon0312.output
@@ -0,0 +1,49 @@
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/amazon0312.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 30.323144912719727}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+ 3200438, 3200440]),
+ col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+ 400707]),
+ values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+ nnz=3200440, layout=torch.sparse_csr)
+tensor([0.9519, 0.7886, 0.4122, ..., 0.0191, 0.4041, 0.8787])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 30.323144912719727 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428,
+ 3200438, 3200440]),
+ col_indices=tensor([ 1, 2, 3, ..., 400724, 6009,
+ 400707]),
+ values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727),
+ nnz=3200440, layout=torch.sparse_csr)
+tensor([0.9519, 0.7886, 0.4122, ..., 0.0191, 0.4041, 0.8787])
+Matrix Type: SuiteSparse
+Matrix: amazon0312
+Matrix Format: csr
+Shape: torch.Size([400727, 400727])
+Rows: 400727
+Size: 160582128529
+NNZ: 3200440
+Density: 1.9930237750099465e-05
+Time: 30.323144912719727 seconds
+
+[26.56, 26.36, 26.4, 26.4, 26.28, 26.16, 26.4, 26.4, 26.6, 26.56]
+[26.56, 26.64, 26.44, 29.44, 30.6, 36.2, 50.64, 63.2, 76.56, 90.36, 96.08, 94.84, 94.28, 93.0, 94.08, 94.08, 94.76, 95.56, 97.52, 96.08, 95.76, 91.48, 89.72, 90.12, 89.92, 89.6, 90.64, 91.72, 90.48, 91.12, 89.6, 89.6, 90.12, 90.48]
+34.98710083961487
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 30.323144912719727, 'TIME_S_1KI': 30.323144912719727, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2708.072883968354, 'W': 77.40203729318671}
+[26.56, 26.36, 26.4, 26.4, 26.28, 26.16, 26.4, 26.4, 26.6, 26.56, 26.8, 26.84, 26.76, 26.72, 26.52, 26.44, 26.32, 26.16, 26.0, 26.0]
+475.72
+23.786
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 30.323144912719727, 'TIME_S_1KI': 30.323144912719727, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2708.072883968354, 'W': 77.40203729318671, 'J_1KI': 2708.072883968354, 'W_1KI': 77.40203729318671, 'W_D': 53.61603729318671, 'J_D': 1875.8697033972746, 'W_D_1KI': 53.61603729318671, 'J_D_1KI': 53.61603729318671}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_darcy003.json b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_darcy003.json
new file mode 100644
index 0000000..8890b72
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_darcy003.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 20.392087936401367, "TIME_S_1KI": 20.392087936401367, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1566.7498482894898, "W": 69.10901709242208, "J_1KI": 1566.7498482894898, "W_1KI": 69.10901709242208, "W_D": 45.48401709242208, "J_D": 1031.1545421612263, "W_D_1KI": 45.48401709242208, "J_D_1KI": 45.48401709242208}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_darcy003.output b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_darcy003.output
new file mode 100644
index 0000000..05b62a7
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_darcy003.output
@@ -0,0 +1,49 @@
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/darcy003.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 20.392087936401367}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.5706, 0.0924, 0.8150, ..., 0.7995, 0.0048, 0.8110])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 20.392087936401367 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.5706, 0.0924, 0.8150, ..., 0.7995, 0.0048, 0.8110])
+Matrix Type: SuiteSparse
+Matrix: darcy003
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 20.392087936401367 seconds
+
+[26.52, 26.44, 26.28, 26.32, 26.32, 26.12, 26.16, 26.12, 26.12, 26.08]
+[26.52, 26.68, 29.52, 30.56, 30.56, 36.56, 50.04, 66.72, 78.32, 93.24, 95.96, 93.96, 92.08, 92.12, 90.56, 91.2, 90.64, 89.92, 88.64, 89.44, 89.44, 91.68]
+22.670700788497925
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 20.392087936401367, 'TIME_S_1KI': 20.392087936401367, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1566.7498482894898, 'W': 69.10901709242208}
+[26.52, 26.44, 26.28, 26.32, 26.32, 26.12, 26.16, 26.12, 26.12, 26.08, 26.48, 26.28, 26.28, 26.4, 26.2, 26.0, 26.28, 26.28, 26.28, 26.16]
+472.5
+23.625
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 20.392087936401367, 'TIME_S_1KI': 20.392087936401367, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1566.7498482894898, 'W': 69.10901709242208, 'J_1KI': 1566.7498482894898, 'W_1KI': 69.10901709242208, 'W_D': 45.48401709242208, 'J_D': 1031.1545421612263, 'W_D_1KI': 45.48401709242208, 'J_D_1KI': 45.48401709242208}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_helm2d03.json
new file mode 100644
index 0000000..1f7ee40
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_helm2d03.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 21.815975189208984, "TIME_S_1KI": 21.815975189208984, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1916.1918925571438, "W": 74.64919082171829, "J_1KI": 1916.1918925571438, "W_1KI": 74.64919082171829, "W_D": 51.65619082171829, "J_D": 1325.9778567373746, "W_D_1KI": 51.65619082171829, "J_D_1KI": 51.65619082171829}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_helm2d03.output
new file mode 100644
index 0000000..256e384
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_helm2d03.output
@@ -0,0 +1,51 @@
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/helm2d03.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 21.815975189208984}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+ 2741928, 2741935]),
+ col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+ 392256]),
+ values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+ 3.5476]), size=(392257, 392257), nnz=2741935,
+ layout=torch.sparse_csr)
+tensor([0.9058, 0.4925, 0.9859, ..., 0.1438, 0.2004, 0.4986])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 21.815975189208984 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921,
+ 2741928, 2741935]),
+ col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254,
+ 392256]),
+ values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602,
+ 3.5476]), size=(392257, 392257), nnz=2741935,
+ layout=torch.sparse_csr)
+tensor([0.9058, 0.4925, 0.9859, ..., 0.1438, 0.2004, 0.4986])
+Matrix Type: SuiteSparse
+Matrix: helm2d03
+Matrix Format: csr
+Shape: torch.Size([392257, 392257])
+Rows: 392257
+Size: 153865554049
+NNZ: 2741935
+Density: 1.7820330332848923e-05
+Time: 21.815975189208984 seconds
+
+[25.36, 25.4, 25.56, 25.56, 25.6, 25.52, 25.52, 25.28, 25.36, 25.16]
+[25.0, 24.96, 25.32, 27.72, 29.24, 40.8, 57.48, 70.72, 86.32, 97.72, 97.72, 96.24, 97.0, 95.72, 95.32, 93.32, 95.8, 96.36, 95.36, 95.72, 94.88, 94.16, 93.92, 92.68, 93.4]
+25.669292211532593
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 21.815975189208984, 'TIME_S_1KI': 21.815975189208984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1916.1918925571438, 'W': 74.64919082171829}
+[25.36, 25.4, 25.56, 25.56, 25.6, 25.52, 25.52, 25.28, 25.36, 25.16, 25.44, 25.44, 25.28, 25.52, 25.88, 25.84, 25.68, 25.88, 25.8, 25.52]
+459.86
+22.993000000000002
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 21.815975189208984, 'TIME_S_1KI': 21.815975189208984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1916.1918925571438, 'W': 74.64919082171829, 'J_1KI': 1916.1918925571438, 'W_1KI': 74.64919082171829, 'W_D': 51.65619082171829, 'J_D': 1325.9778567373746, 'W_D_1KI': 51.65619082171829, 'J_D_1KI': 51.65619082171829}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_language.json b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_language.json
new file mode 100644
index 0000000..6674d3c
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_language.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 11.040250062942505, "TIME_S_1KI": 11.040250062942505, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1365.697071151733, "W": 66.06671490426629, "J_1KI": 1365.697071151733, "W_1KI": 66.06671490426629, "W_D": 44.15271490426629, "J_D": 912.7021604681013, "W_D_1KI": 44.15271490426629, "J_D_1KI": 44.15271490426629}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_language.output b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_language.output
new file mode 100644
index 0000000..8666779
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_language.output
@@ -0,0 +1,49 @@
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/language.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 11.040250062942505}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+ 1216332, 1216334]),
+ col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+ 399129]),
+ values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+ size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.4838, 0.0445, 0.9105, ..., 0.8272, 0.1700, 0.2253])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 11.040250062942505 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330,
+ 1216332, 1216334]),
+ col_indices=tensor([ 0, 0, 1, ..., 399128, 399125,
+ 399129]),
+ values=tensor([ 1., -1., 1., ..., 1., -1., 1.]),
+ size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr)
+tensor([0.4838, 0.0445, 0.9105, ..., 0.8272, 0.1700, 0.2253])
+Matrix Type: SuiteSparse
+Matrix: language
+Matrix Format: csr
+Shape: torch.Size([399130, 399130])
+Rows: 399130
+Size: 159304756900
+NNZ: 1216334
+Density: 7.635264782228233e-06
+Time: 11.040250062942505 seconds
+
+[24.56, 24.52, 24.08, 24.04, 23.92, 23.84, 23.84, 23.84, 24.04, 24.12]
+[24.28, 24.56, 24.56, 28.8, 30.88, 44.64, 59.0, 72.44, 83.72, 90.96, 88.6, 88.6, 85.88, 86.24, 86.16, 87.16, 86.96, 88.8, 89.32, 90.72]
+20.67148447036743
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 11.040250062942505, 'TIME_S_1KI': 11.040250062942505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1365.697071151733, 'W': 66.06671490426629}
+[24.56, 24.52, 24.08, 24.04, 23.92, 23.84, 23.84, 23.84, 24.04, 24.12, 24.64, 24.56, 24.68, 24.52, 24.56, 24.64, 24.8, 24.72, 24.72, 24.6]
+438.28
+21.913999999999998
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 11.040250062942505, 'TIME_S_1KI': 11.040250062942505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1365.697071151733, 'W': 66.06671490426629, 'J_1KI': 1365.697071151733, 'W_1KI': 66.06671490426629, 'W_D': 44.15271490426629, 'J_D': 912.7021604681013, 'W_D_1KI': 44.15271490426629, 'J_D_1KI': 44.15271490426629}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_marine1.json b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_marine1.json
new file mode 100644
index 0000000..44a4ce9
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_marine1.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 47.31629490852356, "TIME_S_1KI": 47.31629490852356, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3588.3839783859257, "W": 77.04772778103, "J_1KI": 3588.3839783859257, "W_1KI": 77.04772778103, "W_D": 53.199727781030006, "J_D": 2477.698646305085, "W_D_1KI": 53.199727781030006, "J_D_1KI": 53.199727781030006}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_marine1.output b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_marine1.output
new file mode 100644
index 0000000..1934d42
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_marine1.output
@@ -0,0 +1,51 @@
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/marine1.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 47.31629490852356}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+ 6226531, 6226538]),
+ col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+ 400319]),
+ values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+ -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+ size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.9557, 0.7622, 0.1453, ..., 0.2002, 0.7167, 0.8732])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 47.31629490852356 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522,
+ 6226531, 6226538]),
+ col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318,
+ 400319]),
+ values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ...,
+ -6.8099e-01, -6.4187e-01, 1.7595e+01]),
+ size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr)
+tensor([0.9557, 0.7622, 0.1453, ..., 0.2002, 0.7167, 0.8732])
+Matrix Type: SuiteSparse
+Matrix: marine1
+Matrix Format: csr
+Shape: torch.Size([400320, 400320])
+Rows: 400320
+Size: 160256102400
+NNZ: 6226538
+Density: 3.885367175883594e-05
+Time: 47.31629490852356 seconds
+
+[26.68, 26.68, 26.56, 26.48, 26.24, 26.08, 26.16, 25.8, 26.04, 26.04]
+[26.4, 26.6, 26.72, 27.8, 29.68, 36.68, 47.12, 59.56, 71.44, 82.04, 90.0, 90.2, 89.8, 91.64, 91.36, 89.28, 89.28, 88.72, 90.36, 90.56, 88.08, 89.32, 91.24, 89.92, 91.16, 94.4, 94.0, 92.04, 91.28, 91.0, 89.92, 89.76, 89.76, 89.8, 90.52, 90.56, 92.08, 91.72, 90.16, 89.0, 89.52, 88.96, 88.76, 89.0, 87.92]
+46.57352113723755
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 47.31629490852356, 'TIME_S_1KI': 47.31629490852356, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3588.3839783859257, 'W': 77.04772778103}
+[26.68, 26.68, 26.56, 26.48, 26.24, 26.08, 26.16, 25.8, 26.04, 26.04, 26.76, 26.48, 26.44, 26.48, 26.72, 26.72, 26.92, 26.92, 27.0, 27.0]
+476.9599999999999
+23.847999999999995
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 47.31629490852356, 'TIME_S_1KI': 47.31629490852356, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3588.3839783859257, 'W': 77.04772778103, 'J_1KI': 3588.3839783859257, 'W_1KI': 77.04772778103, 'W_D': 53.199727781030006, 'J_D': 2477.698646305085, 'W_D_1KI': 53.199727781030006, 'J_D_1KI': 53.199727781030006}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_mario002.json b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_mario002.json
new file mode 100644
index 0000000..2a1d024
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_mario002.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 26.970608472824097, "TIME_S_1KI": 26.970608472824097, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1533.6805934524534, "W": 70.7905728448904, "J_1KI": 1533.6805934524534, "W_1KI": 70.7905728448904, "W_D": 46.71357284489041, "J_D": 1012.0514249830246, "W_D_1KI": 46.71357284489041, "J_D_1KI": 46.71357284489041}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_mario002.output b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_mario002.output
new file mode 100644
index 0000000..34cf597
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_mario002.output
@@ -0,0 +1,49 @@
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/mario002.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 26.970608472824097}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.0717, 0.4634, 0.0880, ..., 0.8346, 0.7497, 0.2295])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 26.970608472824097 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236,
+ 2101239, 2101242]),
+ col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926,
+ 234127]),
+ values=tensor([ 1., 0., 0., ..., -1., -1., -1.]),
+ size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr)
+tensor([0.0717, 0.4634, 0.0880, ..., 0.8346, 0.7497, 0.2295])
+Matrix Type: SuiteSparse
+Matrix: mario002
+Matrix Format: csr
+Shape: torch.Size([389874, 389874])
+Rows: 389874
+Size: 152001735876
+NNZ: 2101242
+Density: 1.3823802655215408e-05
+Time: 26.970608472824097 seconds
+
+[27.04, 26.84, 26.96, 26.8, 26.8, 27.0, 26.88, 26.6, 26.44, 26.44]
+[26.12, 26.4, 26.44, 27.4, 29.4, 37.08, 54.84, 69.0, 83.6, 95.56, 98.68, 97.52, 97.96, 97.76, 97.76, 97.8, 98.4, 95.2, 93.56, 91.64, 89.8]
+21.665040016174316
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 26.970608472824097, 'TIME_S_1KI': 26.970608472824097, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1533.6805934524534, 'W': 70.7905728448904}
+[27.04, 26.84, 26.96, 26.8, 26.8, 27.0, 26.88, 26.6, 26.44, 26.44, 26.88, 26.64, 26.8, 27.08, 27.0, 26.92, 26.68, 26.44, 26.32, 26.32]
+481.53999999999996
+24.076999999999998
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 26.970608472824097, 'TIME_S_1KI': 26.970608472824097, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1533.6805934524534, 'W': 70.7905728448904, 'J_1KI': 1533.6805934524534, 'W_1KI': 70.7905728448904, 'W_D': 46.71357284489041, 'J_D': 1012.0514249830246, 'W_D_1KI': 46.71357284489041, 'J_D_1KI': 46.71357284489041}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_test1.json b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_test1.json
new file mode 100644
index 0000000..a64dfeb
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_test1.json
@@ -0,0 +1 @@
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 92.92496466636658, "TIME_S_1KI": 92.92496466636658, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 8599.791397418978, "W": 85.10134779265783, "J_1KI": 8599.791397418978, "W_1KI": 85.10134779265783, "W_D": 60.408347792657835, "J_D": 6104.4766405498995, "W_D_1KI": 60.408347792657835, "J_D_1KI": 60.408347792657835}
diff --git a/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_test1.output b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_test1.output
new file mode 100644
index 0000000..6caaca3
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/altra_max_csr_10_10_10_test1.output
@@ -0,0 +1,51 @@
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py suitesparse csr 1000 -m matrices/389000+_cols/test1.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 92.92496466636658}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+ 12968191, 12968200]),
+ col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+ 392907]),
+ values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+ 0.0000e+00, 0.0000e+00, 2.1156e-17]),
+ size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.1407, 0.4697, 0.2203, ..., 0.3353, 0.2584, 0.9591])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 92.92496466636658 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181,
+ 12968191, 12968200]),
+ col_indices=tensor([ 0, 1, 8, ..., 392905, 392906,
+ 392907]),
+ values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ...,
+ 0.0000e+00, 0.0000e+00, 2.1156e-17]),
+ size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr)
+tensor([0.1407, 0.4697, 0.2203, ..., 0.3353, 0.2584, 0.9591])
+Matrix Type: SuiteSparse
+Matrix: test1
+Matrix Format: csr
+Shape: torch.Size([392908, 392908])
+Rows: 392908
+Size: 154376696464
+NNZ: 12968200
+Density: 8.400361127706946e-05
+Time: 92.92496466636658 seconds
+
+[27.6, 27.24, 27.2, 27.24, 27.04, 26.92, 26.76, 26.64, 26.72, 26.64]
+[26.8, 26.88, 30.16, 31.04, 31.04, 35.8, 41.52, 47.96, 57.12, 73.0, 77.8, 92.16, 95.08, 96.56, 94.84, 94.48, 94.88, 92.8, 91.28, 91.28, 91.72, 91.6, 89.08, 89.24, 90.12, 89.76, 89.48, 89.84, 90.24, 88.68, 88.6, 90.24, 88.36, 90.44, 90.44, 93.28, 93.6, 95.92, 95.52, 91.44, 91.84, 89.96, 91.0, 91.04, 90.84, 90.68, 88.92, 93.0, 95.6, 97.32, 97.32, 98.32, 96.56, 92.48, 91.04, 92.0, 91.72, 94.24, 92.8, 92.52, 91.64, 93.04, 93.56, 98.0, 99.0, 97.0, 97.0, 94.36, 93.2, 93.0, 91.36, 92.44, 95.16, 96.28, 96.76, 96.4, 97.92, 92.76, 93.72, 94.64, 95.76, 96.08, 96.08, 99.8, 99.2, 97.52, 97.16, 96.92, 97.2, 99.08, 99.0, 97.56, 97.8, 97.16, 97.4, 98.52, 99.92, 98.48]
+101.05352759361267
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 92.92496466636658, 'TIME_S_1KI': 92.92496466636658, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8599.791397418978, 'W': 85.10134779265783}
+[27.6, 27.24, 27.2, 27.24, 27.04, 26.92, 26.76, 26.64, 26.72, 26.64, 27.96, 27.76, 27.8, 27.8, 27.96, 28.08, 28.04, 27.96, 27.76, 27.68]
+493.86
+24.693
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 92.92496466636658, 'TIME_S_1KI': 92.92496466636658, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8599.791397418978, 'W': 85.10134779265783, 'J_1KI': 8599.791397418978, 'W_1KI': 85.10134779265783, 'W_D': 60.408347792657835, 'J_D': 6104.4766405498995, 'W_D_1KI': 60.408347792657835, 'J_D_1KI': 60.408347792657835}
diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_amazon0312.json
new file mode 100644
index 0000000..9573140
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_amazon0312.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 19947, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.311090230941772, "TIME_S_1KI": 0.5169243611040143, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1933.8912408947945, "W": 148.79, "J_1KI": 96.951483475951, "W_1KI": 7.459267057702912, "W_D": 113.0615, "J_D": 1469.5116911917924, "W_D_1KI": 5.668095452950318, "J_D_1KI": 0.28415779079311765}
diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_amazon0312.output
new file mode 100644
index 0000000..2ce455e
--- /dev/null
+++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_amazon0312.output
@@ -0,0 +1,93 @@
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/amazon0312.mtx']
+{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 0.5623607635498047}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.4201, 0.2453, 0.6690, ..., 0.5318, 0.2145, 0.3171]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 0.5623607635498047 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '18671', '-m', 'matrices/389000+_cols/amazon0312.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 9.828076839447021} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.8027, 0.1127, 0.8893, ..., 0.1900, 0.1655, 0.6380]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 9.828076839447021 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '19947', '-m', 'matrices/389000+_cols/amazon0312.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.311090230941772} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.8053, 0.3778, 0.3769, ..., 0.3826, 0.6227, 0.7489]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 10.311090230941772 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.8053, 0.3778, 0.3769, ..., 0.3826, 0.6227, 0.7489]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 10.311090230941772 seconds + +[40.28, 39.83, 40.54, 39.37, 39.82, 39.28, 40.53, 39.29, 39.59, 39.21] +[148.79] +12.997454404830933 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19947, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.311090230941772, 'TIME_S_1KI': 0.5169243611040143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1933.8912408947945, 'W': 148.79} +[40.28, 39.83, 40.54, 39.37, 39.82, 39.28, 40.53, 39.29, 39.59, 39.21, 39.98, 39.27, 40.58, 39.91, 39.77, 39.38, 39.3, 39.5, 39.26, 39.23] +714.5699999999999 +35.7285 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19947, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.311090230941772, 'TIME_S_1KI': 0.5169243611040143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1933.8912408947945, 'W': 148.79, 'J_1KI': 96.951483475951, 'W_1KI': 7.459267057702912, 'W_D': 113.0615, 'J_D': 1469.5116911917924, 'W_D_1KI': 5.668095452950318, 'J_D_1KI': 0.28415779079311765} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_darcy003.json b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_darcy003.json new file mode 100644 index 0000000..c65cfe6 --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_darcy003.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28421, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 11.00617504119873, "TIME_S_1KI": 0.38725502414407414, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1901.812058868408, "W": 140.38, "J_1KI": 66.91573339672806, "W_1KI": 4.9393054431582275, "W_D": 104.9495, "J_D": 1421.813824420929, "W_D_1KI": 3.6926744308785757, "J_D_1KI": 0.12992767428586524} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_darcy003.output b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_darcy003.output new file mode 100644 index 0000000..6b3958e --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_darcy003.output @@ -0,0 +1,93 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/darcy003.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 0.4240763187408447} + 
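The iteration counts in the runs above follow a visible calibration pattern: each matrix is first timed at 1000 iterations, and the count is then rescaled until the measured loop time clears the 10 s baseline window (amazon0312: 1000 -> 18671 -> 19947; darcy003 below: 1000 -> 24759 -> 28421). A minimal Python sketch of that loop follows. The 10.5 s overshoot target is inferred from those numbers, and the helper name, container choice, and the assumption that the JSON record is the first line of captured stdout are illustrative guesses, not code taken from spmv.py or batch.py.

import json
import subprocess

TARGET_S = 10.5    # assumed overshoot target, inferred from the logged counts
BASELINE_S = 10    # matches BASELINE_TIME_S in the JSON records

def run_spmv(matrix_path, iterations):
    """Run one benchmark and return its measured loop time in seconds."""
    cmd = ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py',
           'suitesparse', 'csr', str(iterations), '-m', matrix_path]
    print(cmd)  # the .output files echo the command list like this
    out = subprocess.check_output(cmd, text=True)
    # assumption: spmv.py prints its JSON record on the first stdout line
    return json.loads(out.splitlines()[0])['TIME_S']

def calibrate(matrix_path):
    """Scale the iteration count until the timed loop exceeds BASELINE_S."""
    iterations = 1000
    time_s = run_spmv(matrix_path, iterations)
    while time_s < BASELINE_S:
        iterations = int(iterations * TARGET_S / time_s)
        time_s = run_spmv(matrix_path, iterations)
    return iterations, time_s

Checking the sketch against the log: amazon0312 timed 0.5624 s at 1000 iterations gives int(1000 * 10.5 / 0.5624) = 18671, then 9.828 s at 18671 gives int(18671 * 10.5 / 9.828) = 19947, whose 10.31 s run is the one recorded in the final JSON.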
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.1700, 0.2845, 0.1174, ..., 0.4107, 0.2054, 0.6347]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 0.4240763187408447 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '24759', '-m', 'matrices/389000+_cols/darcy003.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 9.146852016448975} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0166, 0.8822, 0.7788, ..., 0.7058, 0.7278, 0.9607]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 9.146852016448975 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '28421', '-m', 'matrices/389000+_cols/darcy003.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 11.00617504119873} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0083, 0.7190, 0.2772, ..., 0.4887, 0.1977, 0.6043]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 11.00617504119873 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0083, 0.7190, 0.2772, ..., 0.4887, 0.1977, 0.6043]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 11.00617504119873 seconds + +[40.16, 39.59, 39.19, 39.3, 39.1, 39.19, 39.25, 39.14, 39.05, 39.19] +[140.38] +13.547599792480469 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28421, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 11.00617504119873, 'TIME_S_1KI': 0.38725502414407414, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1901.812058868408, 'W': 140.38} +[40.16, 39.59, 39.19, 39.3, 39.1, 39.19, 39.25, 39.14, 39.05, 39.19, 41.13, 39.98, 39.37, 39.35, 39.22, 39.22, 39.21, 39.28, 39.09, 39.68] +708.61 +35.4305 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28421, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 11.00617504119873, 'TIME_S_1KI': 0.38725502414407414, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1901.812058868408, 'W': 140.38, 'J_1KI': 66.91573339672806, 'W_1KI': 4.9393054431582275, 'W_D': 104.9495, 'J_D': 1421.813824420929, 'W_D_1KI': 3.6926744308785757, 'J_D_1KI': 0.12992767428586524} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_helm2d03.json new file mode 100644 index 0000000..f538d2a --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_helm2d03.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 30516, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.102073907852173, "TIME_S_1KI": 0.3310418766500253, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1905.9418558597565, "W": 149.24, 
"J_1KI": 62.45713251604917, "W_1KI": 4.89054922008127, "W_D": 113.06250000000001, "J_D": 1443.9195328205826, "W_D_1KI": 3.7050235941801026, "J_D_1KI": 0.12141249161686009} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_helm2d03.output new file mode 100644 index 0000000..e5c9a6c --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_helm2d03.output @@ -0,0 +1,97 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/helm2d03.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 0.3914318084716797} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.2679, 0.5598, 0.6944, ..., 0.1144, 0.4933, 0.2716]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 0.3914318084716797 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '26824', '-m', 'matrices/389000+_cols/helm2d03.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 9.229604244232178} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.3693, 0.3607, 0.3383, ..., 0.8476, 0.1262, 0.7740]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 9.229604244232178 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '30516', '-m', 'matrices/389000+_cols/helm2d03.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.102073907852173} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.9962, 0.9854, 0.1629, ..., 0.5690, 0.4270, 0.4262]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.102073907852173 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.9962, 0.9854, 0.1629, ..., 0.5690, 0.4270, 0.4262]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.102073907852173 seconds + +[40.11, 39.71, 39.79, 39.24, 41.91, 43.96, 39.7, 39.6, 39.18, 40.31] +[149.24] +12.77098536491394 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 30516, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.102073907852173, 'TIME_S_1KI': 0.3310418766500253, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1905.9418558597565, 'W': 149.24} +[40.11, 39.71, 39.79, 39.24, 41.91, 43.96, 39.7, 39.6, 39.18, 40.31, 39.88, 39.67, 44.75, 39.75, 39.19, 39.36, 39.18, 39.31, 39.57, 39.06] +723.55 +36.177499999999995 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 30516, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.102073907852173, 'TIME_S_1KI': 0.3310418766500253, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1905.9418558597565, 'W': 149.24, 'J_1KI': 62.45713251604917, 'W_1KI': 4.89054922008127, 'W_D': 113.06250000000001, 'J_D': 1443.9195328205826, 'W_D_1KI': 3.7050235941801026, 'J_D_1KI': 0.12141249161686009} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_language.json b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_language.json new file mode 100644 index 0000000..0278cd1 --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_language.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 31490, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.491079330444336, "TIME_S_1KI": 0.33315590125259875, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1840.929758644104, "W": 139.7, "J_1KI": 58.46077353585595, "W_1KI": 4.436328993331216, "W_D": 104.24499999999999, "J_D": 1373.713118753433, "W_D_1KI": 3.310416005080978, "J_D_1KI": 0.10512594490571539} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_language.output b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_language.output new file mode 100644 index 0000000..47af1b0 --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_language.output @@ -0,0 +1,93 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/language.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 
7.635264782228233e-06, "TIME_S": 0.366832971572876} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.4489, 0.3341, 0.6321, ..., 0.9617, 0.3616, 0.1355]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 0.366832971572876 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '28623', '-m', 'matrices/389000+_cols/language.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 9.543859958648682} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.6001, 0.9333, 0.1063, ..., 0.8124, 0.4889, 0.2196]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 9.543859958648682 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '31490', '-m', 'matrices/389000+_cols/language.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.491079330444336} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.8719, 0.5932, 0.6007, ..., 0.1184, 0.6398, 0.5112]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.491079330444336 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.8719, 0.5932, 0.6007, ..., 0.1184, 0.6398, 0.5112]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.491079330444336 seconds + +[40.31, 39.1, 39.47, 38.93, 39.95, 39.02, 39.07, 38.88, 39.52, 38.92] +[139.7] +13.177736282348633 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 31490, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.491079330444336, 'TIME_S_1KI': 0.33315590125259875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1840.929758644104, 'W': 139.7} +[40.31, 39.1, 39.47, 38.93, 39.95, 39.02, 39.07, 38.88, 39.52, 38.92, 40.13, 40.06, 39.2, 39.24, 39.63, 39.8, 39.04, 39.77, 39.24, 39.0] +709.1 +35.455 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 31490, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.491079330444336, 'TIME_S_1KI': 0.33315590125259875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1840.929758644104, 'W': 139.7, 'J_1KI': 58.46077353585595, 'W_1KI': 4.436328993331216, 'W_D': 104.24499999999999, 'J_D': 1373.713118753433, 'W_D_1KI': 3.310416005080978, 'J_D_1KI': 0.10512594490571539} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_marine1.json b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_marine1.json new file mode 100644 index 0000000..d48619d --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_marine1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 19504, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.365571975708008, "TIME_S_1KI": 0.5314587764411407, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2136.8143803691864, "W": 158.23, "J_1KI": 
109.55775124944557, "W_1KI": 8.112694831829367, "W_D": 121.8915, "J_D": 1646.0817167716025, "W_D_1KI": 6.249564191960623, "J_D_1KI": 0.3204247432301386} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_marine1.output b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_marine1.output new file mode 100644 index 0000000..5ccf432 --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_marine1.output @@ -0,0 +1,97 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/marine1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 0.6027348041534424} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.5488, 0.1373, 0.1334, ..., 0.6361, 0.1287, 0.1871]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 0.6027348041534424 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '17420', '-m', 'matrices/389000+_cols/marine1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 9.3778395652771} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.5314, 0.3776, 0.6404, ..., 0.2799, 0.5486, 0.5791]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 9.3778395652771 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '19504', '-m', 'matrices/389000+_cols/marine1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.365571975708008} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.7619, 0.3106, 0.0542, ..., 0.9939, 0.8451, 0.6161]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.365571975708008 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.7619, 0.3106, 0.0542, ..., 0.9939, 0.8451, 0.6161]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.365571975708008 seconds + +[40.92, 39.6, 39.57, 39.38, 39.87, 44.96, 39.72, 39.88, 39.49, 39.37] +[158.23] +13.504483222961426 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19504, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.365571975708008, 'TIME_S_1KI': 0.5314587764411407, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2136.8143803691864, 'W': 158.23} +[40.92, 39.6, 39.57, 39.38, 39.87, 44.96, 39.72, 39.88, 39.49, 39.37, 40.1, 41.46, 44.99, 39.34, 39.95, 39.5, 40.61, 39.29, 39.33, 39.27] +726.77 +36.338499999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19504, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.365571975708008, 'TIME_S_1KI': 0.5314587764411407, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2136.8143803691864, 'W': 158.23, 'J_1KI': 109.55775124944557, 'W_1KI': 8.112694831829367, 'W_D': 121.8915, 'J_D': 1646.0817167716025, 'W_D_1KI': 6.249564191960623, 'J_D_1KI': 0.3204247432301386} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_mario002.json b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_mario002.json new file mode 100644 index 0000000..69c4b0e --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_mario002.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 24972, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.226792573928833, "TIME_S_1KI": 0.4095303769793702, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1665.3517652535438, "W": 140.07, "J_1KI": 66.68876202360819, "W_1KI": 5.609082172032676, "W_D": 104.13924999999999, "J_D": 1238.155806523025, "W_D_1KI": 4.170240669549895, "J_D_1KI": 0.16699666304460578} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_mario002.output b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_mario002.output new file mode 100644 index 0000000..0369c8f --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_mario002.output @@ -0,0 +1,71 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/mario002.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 
1.3823802655215408e-05, "TIME_S": 0.4204576015472412} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.4700, 0.6519, 0.6666, ..., 0.7659, 0.1482, 0.5452]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 0.4204576015472412 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '24972', '-m', 'matrices/389000+_cols/mario002.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.226792573928833} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0699, 0.8434, 0.7786, ..., 0.4343, 0.2465, 0.4017]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.226792573928833 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.0699, 0.8434, 0.7786, ..., 0.4343, 0.2465, 0.4017]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.226792573928833 seconds + +[40.64, 39.1, 39.09, 39.17, 39.18, 39.36, 39.46, 39.42, 38.91, 38.87] +[140.07] +11.889425039291382 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 24972, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.226792573928833, 'TIME_S_1KI': 0.4095303769793702, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1665.3517652535438, 'W': 140.07} +[40.64, 39.1, 39.09, 39.17, 39.18, 39.36, 39.46, 39.42, 38.91, 38.87, 39.88, 39.14, 39.13, 39.09, 39.22, 39.35, 39.38, 39.37, 40.66, 59.78] +718.615 +35.93075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 24972, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.226792573928833, 'TIME_S_1KI': 0.4095303769793702, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1665.3517652535438, 'W': 140.07, 'J_1KI': 66.68876202360819, 'W_1KI': 5.609082172032676, 'W_D': 104.13924999999999, 'J_D': 1238.155806523025, 'W_D_1KI': 4.170240669549895, 'J_D_1KI': 0.16699666304460578} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_test1.json b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_test1.json new file mode 100644 index 0000000..f7d6df9 --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_test1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2723, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 11.063719511032104, "TIME_S_1KI": 4.063062618814581, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1680.215021328926, "W": 125.38, "J_1KI": 617.045545842426, "W_1KI": 46.04480352552332, "W_D": 89.84949999999999, "J_D": 1204.071459235072, "W_D_1KI": 32.99651120088138, "J_D_1KI": 12.117705178436056} diff --git a/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_test1.output b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_test1.output new file mode 100644 index 0000000..a60431b --- /dev/null +++ b/pytorch/output_389000+_maxcore/epyc_7313p_max_csr_10_10_10_test1.output @@ -0,0 +1,74 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/test1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 3.8556222915649414} + 
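The derived fields in each final record reduce to simple arithmetic over the raw lines printed just before it: the 20-sample list is idle power, its printed sum and mean (e.g. 710.61 and 35.5305 for the test1 run below) give the baseline watts, W_D and J_D subtract that baseline from the measured power and energy, and the *_1KI fields divide by thousands of iterations. A minimal sketch under those assumptions (function and variable names are illustrative, not from spmv.py):

def derive(record, baseline_samples, elapsed_s):
    """Fill in the W_D/J_D/*_1KI fields seen in the JSON records above."""
    kilo_iters = record['ITERATIONS'] / 1000
    idle_w = sum(baseline_samples) / len(baseline_samples)  # e.g. 710.61 / 20 = 35.5305
    record['W_D'] = record['W'] - idle_w                    # power above idle
    record['J_D'] = record['J'] - idle_w * elapsed_s        # energy above idle
    for key in ('TIME_S', 'J', 'W', 'W_D'):
        record[key + '_1KI'] = record[key] / kilo_iters     # per 1000 iterations
    # In the logged records J_D_1KI consistently equals W_D_1KI / kilo_iters,
    # not J_D / kilo_iters (e.g. test1: 32.9965 / 2.723 = 12.1177), so the
    # sketch reproduces that behavior as observed.
    record['J_D_1KI'] = record['W_D_1KI'] / kilo_iters
    return record

As a spot check against the test1 record: W_D = 125.38 - 35.5305 = 89.8495 and J_D = 1680.215 - 35.5305 * 13.401 = 1204.07, both matching the JSON.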
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.7510, 0.1721, 0.0746, ..., 0.5838, 0.0016, 0.2497]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 3.8556222915649414 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'csr', '2723', '-m', 'matrices/389000+_cols/test1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 11.063719511032104} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.3646, 0.9778, 0.6678, ..., 0.8764, 0.3618, 0.1561]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 11.063719511032104 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.3646, 0.9778, 0.6678, ..., 0.8764, 0.3618, 0.1561]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 11.063719511032104 seconds + +[40.25, 40.25, 39.32, 39.94, 39.64, 39.88, 39.53, 39.13, 39.13, 39.17] +[125.38] +13.400981187820435 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2723, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 11.063719511032104, 'TIME_S_1KI': 4.063062618814581, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1680.215021328926, 'W': 125.38} +[40.25, 40.25, 39.32, 39.94, 39.64, 39.88, 39.53, 39.13, 39.13, 39.17, 40.93, 39.54, 39.73, 39.16, 39.13, 39.04, 39.13, 39.33, 39.0, 39.11] +710.61 +35.5305 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2723, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 11.063719511032104, 'TIME_S_1KI': 4.063062618814581, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1680.215021328926, 'W': 125.38, 'J_1KI': 617.045545842426, 'W_1KI': 46.04480352552332, 'W_D': 89.84949999999999, 'J_D': 1204.071459235072, 'W_D_1KI': 32.99651120088138, 'J_D_1KI': 12.117705178436056} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_amazon0312.json b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_amazon0312.json new file mode 100644 index 0000000..17b55ec --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_amazon0312.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8225, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.766162395477295, "TIME_S_1KI": 1.3089559143437441, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1300.989917767048, "W": 89.05, "J_1KI": 158.17506598991466, "W_1KI": 10.826747720364741, "W_D": 72.6925, "J_D": 1062.012460384965, "W_D_1KI": 8.837993920972645, "J_D_1KI": 1.0745281362884673} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_amazon0312.output b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_amazon0312.output new file mode 100644 index 0000000..81fbe8a --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_amazon0312.output @@ -0,0 +1,71 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/amazon0312.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, 
"MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 1.2765758037567139} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.8946, 0.9409, 0.1159, ..., 0.4084, 0.7142, 0.8849]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 1.2765758037567139 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '8225', '-m', 'matrices/389000+_cols/amazon0312.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "amazon0312", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400727, 400727], "MATRIX_ROWS": 400727, "MATRIX_SIZE": 160582128529, "MATRIX_NNZ": 3200440, "MATRIX_DENSITY": 1.9930237750099465e-05, "TIME_S": 10.766162395477295} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.2056, 0.8237, 0.5206, ..., 0.1956, 0.5378, 0.6984]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 10.766162395477295 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 3200428, + 3200438, 3200440]), + col_indices=tensor([ 1, 2, 3, ..., 400724, 6009, + 400707]), + values=tensor([1., 1., 1., ..., 1., 1., 1.]), size=(400727, 400727), + nnz=3200440, layout=torch.sparse_csr) +tensor([0.2056, 0.8237, 0.5206, ..., 0.1956, 0.5378, 0.6984]) +Matrix Type: SuiteSparse +Matrix: amazon0312 +Matrix Format: csr +Shape: torch.Size([400727, 400727]) +Rows: 400727 +Size: 160582128529 +NNZ: 3200440 +Density: 1.9930237750099465e-05 +Time: 10.766162395477295 seconds + +[17.94, 17.59, 17.86, 17.65, 17.7, 17.49, 18.22, 21.41, 17.9, 17.57] +[89.05] +14.609656572341919 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8225, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.766162395477295, 'TIME_S_1KI': 1.3089559143437441, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.989917767048, 'W': 89.05} +[17.94, 17.59, 17.86, 17.65, 17.7, 17.49, 18.22, 21.41, 17.9, 17.57, 18.25, 17.56, 21.38, 17.98, 17.81, 17.52, 18.04, 17.55, 17.82, 17.58] +327.15 +16.357499999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8225, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'amazon0312', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400727, 400727], 'MATRIX_ROWS': 400727, 'MATRIX_SIZE': 160582128529, 'MATRIX_NNZ': 3200440, 'MATRIX_DENSITY': 1.9930237750099465e-05, 'TIME_S': 10.766162395477295, 'TIME_S_1KI': 1.3089559143437441, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.989917767048, 'W': 89.05, 'J_1KI': 158.17506598991466, 'W_1KI': 10.826747720364741, 'W_D': 72.6925, 'J_D': 1062.012460384965, 'W_D_1KI': 8.837993920972645, 'J_D_1KI': 1.0745281362884673} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_darcy003.json b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_darcy003.json new file mode 100644 index 0000000..f6c6715 --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_darcy003.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 13645, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.674118757247925, "TIME_S_1KI": 0.7822732691277335, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1295.4411267971993, "W": 89.73, "J_1KI": 94.93888800272622, "W_1KI": 6.576035177720778, "W_D": 73.57575, "J_D": 1062.2205782341362, "W_D_1KI": 5.392139978013925, "J_D_1KI": 0.3951733219504525} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_darcy003.output b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_darcy003.output new file mode 100644 index 0000000..528c9fd --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_darcy003.output @@ -0,0 +1,71 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/darcy003.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 
0.7694816589355469} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.3795, 0.4102, 0.1265, ..., 0.4337, 0.0977, 0.4509]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 0.7694816589355469 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '13645', '-m', 'matrices/389000+_cols/darcy003.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "darcy003", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.674118757247925} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.9376, 0.2227, 0.1775, ..., 0.8669, 0.9587, 0.8096]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.674118757247925 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.9376, 0.2227, 0.1775, ..., 0.8669, 0.9587, 0.8096]) +Matrix Type: SuiteSparse +Matrix: darcy003 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.674118757247925 seconds + +[18.58, 18.09, 18.09, 17.74, 17.81, 17.92, 17.82, 18.14, 18.2, 17.95] +[89.73] +14.437101602554321 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13645, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.674118757247925, 'TIME_S_1KI': 0.7822732691277335, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.4411267971993, 'W': 89.73} +[18.58, 18.09, 18.09, 17.74, 17.81, 17.92, 17.82, 18.14, 18.2, 17.95, 18.29, 17.6, 18.31, 17.77, 18.5, 17.52, 18.0, 17.51, 17.78, 17.75] +323.085 +16.154249999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13645, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'darcy003', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.674118757247925, 'TIME_S_1KI': 0.7822732691277335, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.4411267971993, 'W': 89.73, 'J_1KI': 94.93888800272622, 'W_1KI': 6.576035177720778, 'W_D': 73.57575, 'J_D': 1062.2205782341362, 'W_D_1KI': 5.392139978013925, 'J_D_1KI': 0.3951733219504525} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_helm2d03.json b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_helm2d03.json new file mode 100644 index 0000000..d957b86 --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_helm2d03.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 12273, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.708429336547852, "TIME_S_1KI": 0.8725192973639575, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1282.9024416732789, "W": 89.76, "J_1KI": 104.53046864444545, "W_1KI": 7.313615252994378, "W_D": 73.62625, "J_D": 1052.309446259439, "W_D_1KI": 5.999042613867839, "J_D_1KI": 0.48880001742588114} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_helm2d03.output b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_helm2d03.output new file mode 100644 index 0000000..61cc75a --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_helm2d03.output @@ -0,0 +1,74 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/helm2d03.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 
0.855471134185791} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.2921, 0.7848, 0.3759, ..., 0.1614, 0.6217, 0.8908]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 0.855471134185791 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '12273', '-m', 'matrices/389000+_cols/helm2d03.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "helm2d03", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392257, 392257], "MATRIX_ROWS": 392257, "MATRIX_SIZE": 153865554049, "MATRIX_NNZ": 2741935, "MATRIX_DENSITY": 1.7820330332848923e-05, "TIME_S": 10.708429336547852} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.0043, 0.4903, 0.3538, ..., 0.3528, 0.3455, 0.3342]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.708429336547852 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2741921, + 2741928, 2741935]), + col_indices=tensor([ 0, 98273, 133833, ..., 392252, 392254, + 392256]), + values=tensor([ 3.4808, -0.6217, -0.5806, ..., -0.6940, -0.7602, + 3.5476]), size=(392257, 392257), nnz=2741935, + layout=torch.sparse_csr) +tensor([0.0043, 0.4903, 0.3538, ..., 0.3528, 0.3455, 0.3342]) +Matrix Type: SuiteSparse +Matrix: helm2d03 +Matrix Format: csr +Shape: torch.Size([392257, 392257]) +Rows: 392257 +Size: 153865554049 +NNZ: 2741935 +Density: 1.7820330332848923e-05 +Time: 10.708429336547852 seconds + +[18.04, 18.17, 18.08, 18.71, 18.3, 17.59, 17.7, 17.54, 17.78, 17.75] +[89.76] +14.292585134506226 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 12273, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.708429336547852, 'TIME_S_1KI': 0.8725192973639575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.9024416732789, 'W': 89.76} +[18.04, 18.17, 18.08, 18.71, 18.3, 17.59, 17.7, 17.54, 17.78, 17.75, 18.01, 17.48, 17.78, 17.28, 17.51, 17.82, 17.71, 17.91, 19.53, 17.77] +322.67499999999995 +16.13375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 12273, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'helm2d03', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392257, 392257], 'MATRIX_ROWS': 392257, 'MATRIX_SIZE': 153865554049, 'MATRIX_NNZ': 2741935, 'MATRIX_DENSITY': 1.7820330332848923e-05, 'TIME_S': 10.708429336547852, 'TIME_S_1KI': 0.8725192973639575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.9024416732789, 'W': 89.76, 'J_1KI': 104.53046864444545, 'W_1KI': 7.313615252994378, 'W_D': 73.62625, 'J_D': 1052.309446259439, 'W_D_1KI': 5.999042613867839, 'J_D_1KI': 0.48880001742588114} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_language.json b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_language.json new file mode 100644 index 0000000..51eb669 --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_language.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 13367, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.647616863250732, "TIME_S_1KI": 0.7965599508678636, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1274.2656374621392, "W": 89.59, "J_1KI": 95.32921653790224, "W_1KI": 6.702326625271191, "W_D": 73.54725, "J_D": 1046.0847572813632, "W_D_1KI": 5.502150819181567, "J_D_1KI": 0.4116219659745318} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_language.output b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_language.output new file mode 100644 index 0000000..467022c --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_language.output @@ -0,0 +1,71 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/language.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 
7.635264782228233e-06, "TIME_S": 0.785470724105835} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.6209, 0.8638, 0.8531, ..., 0.3261, 0.5283, 0.1956]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 0.785470724105835 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '13367', '-m', 'matrices/389000+_cols/language.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "language", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [399130, 399130], "MATRIX_ROWS": 399130, "MATRIX_SIZE": 159304756900, "MATRIX_NNZ": 1216334, "MATRIX_DENSITY": 7.635264782228233e-06, "TIME_S": 10.647616863250732} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.6025, 0.9576, 0.6646, ..., 0.8829, 0.1742, 0.2421]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.647616863250732 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 1216330, + 1216332, 1216334]), + col_indices=tensor([ 0, 0, 1, ..., 399128, 399125, + 399129]), + values=tensor([ 1., -1., 1., ..., 1., -1., 1.]), + size=(399130, 399130), nnz=1216334, layout=torch.sparse_csr) +tensor([0.6025, 0.9576, 0.6646, ..., 0.8829, 0.1742, 0.2421]) +Matrix Type: SuiteSparse +Matrix: language +Matrix Format: csr +Shape: torch.Size([399130, 399130]) +Rows: 399130 +Size: 159304756900 +NNZ: 1216334 +Density: 7.635264782228233e-06 +Time: 10.647616863250732 seconds + +[18.88, 17.79, 17.82, 17.8, 17.88, 17.82, 17.92, 17.54, 17.65, 17.87] +[89.59] +14.223302125930786 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13367, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.647616863250732, 'TIME_S_1KI': 0.7965599508678636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.2656374621392, 'W': 89.59} +[18.88, 17.79, 17.82, 17.8, 17.88, 17.82, 17.92, 17.54, 17.65, 17.87, 18.16, 17.58, 17.67, 17.88, 17.53, 17.55, 17.46, 18.42, 17.89, 18.4] +320.855 +16.04275 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13367, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'language', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [399130, 399130], 'MATRIX_ROWS': 399130, 'MATRIX_SIZE': 159304756900, 'MATRIX_NNZ': 1216334, 'MATRIX_DENSITY': 7.635264782228233e-06, 'TIME_S': 10.647616863250732, 'TIME_S_1KI': 0.7965599508678636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.2656374621392, 'W': 89.59, 'J_1KI': 95.32921653790224, 'W_1KI': 6.702326625271191, 'W_D': 73.54725, 'J_D': 1046.0847572813632, 'W_D_1KI': 5.502150819181567, 'J_D_1KI': 0.4116219659745318} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_marine1.json b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_marine1.json new file mode 100644 index 0000000..735b097 --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_marine1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5913, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.544729232788086, "TIME_S_1KI": 1.783312909316436, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1297.8479092025757, "W": 89.36, "J_1KI": 219.49059854601313, "W_1KI": 15.112464062235752, "W_D": 73.2365, "J_D": 1063.6732139918804, "W_D_1KI": 12.385675629967869, "J_D_1KI": 2.094651721624872} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_marine1.output b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_marine1.output new file mode 100644 index 0000000..41b8f99 --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_marine1.output @@ -0,0 +1,74 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/marine1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 1.7754507064819336} + 
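A note on the derived fields that close every .json record in this diff: the post-processing script itself is not included, but the relationships are consistent across all of the files here. On the Xeon runs, W is the single wattmeter reading printed in brackets, J is W times the measured wall time printed just below it, and the *_D (dynamic) variants subtract the mean of the 20 idle samples printed before the final dict (e.g. 323.085 / 20 = 16.15425 for darcy003, giving W_D = 89.73 - 16.15425 = 73.57575). The sketch below reproduces those identities; it is a reconstruction, not the repository's code, and the name derive_metrics is illustrative. One oddity worth flagging: in every file here J_D_1KI equals W_D_1KI / ITERATIONS * 1000 rather than J_D / ITERATIONS * 1000, which looks like an extra division by the iteration count in the original script.

    # Reconstruction (assumed, not the actual script) of the derived fields,
    # based on identities that hold in every .json/.output pair in this diff:
    #   W_D   = W - mean(idle_watts)      e.g. 89.73 - 323.085/20 = 73.57575
    #   J     = W * elapsed_s             e.g. 89.73 * 14.4371... = 1295.44...
    #   X_1KI = X / ITERATIONS * 1000     for X in {TIME_S, J, W, W_D}
    def derive_metrics(result, idle_watts, w, elapsed_s):
        k = result["ITERATIONS"] / 1000.0
        w_d = w - sum(idle_watts) / len(idle_watts)  # power above idle baseline
        result.update({
            "TIME_S_1KI": result["TIME_S"] / k,
            "J": w * elapsed_s,
            "W": w,
            "J_1KI": w * elapsed_s / k,
            "W_1KI": w / k,
            "W_D": w_d,
            "J_D": w_d * elapsed_s,
            "W_D_1KI": w_d / k,
            # Matches the committed files; J_D / k would seem more natural,
            # so this may be a bug in the original post-processing.
            "J_D_1KI": (w_d / k) / k,
        })
        return result

Note that W on the Altra runs is instead derived from the full list of load samples, so the single-sample assumption above only covers the Xeon files.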
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.1497, 0.9722, 0.3177, ..., 0.4783, 0.5290, 0.8575]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 1.7754507064819336 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '5913', '-m', 'matrices/389000+_cols/marine1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "marine1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [400320, 400320], "MATRIX_ROWS": 400320, "MATRIX_SIZE": 160256102400, "MATRIX_NNZ": 6226538, "MATRIX_DENSITY": 3.885367175883594e-05, "TIME_S": 10.544729232788086} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.3921, 0.4534, 0.1657, ..., 0.0431, 0.7782, 0.5404]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.544729232788086 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 6226522, + 6226531, 6226538]), + col_indices=tensor([ 0, 1, 10383, ..., 400315, 400318, + 400319]), + values=tensor([ 6.2373e+03, -1.8964e+00, -5.7529e+00, ..., + -6.8099e-01, -6.4187e-01, 1.7595e+01]), + size=(400320, 400320), nnz=6226538, layout=torch.sparse_csr) +tensor([0.3921, 0.4534, 0.1657, ..., 0.0431, 0.7782, 0.5404]) +Matrix Type: SuiteSparse +Matrix: marine1 +Matrix Format: csr +Shape: torch.Size([400320, 400320]) +Rows: 400320 +Size: 160256102400 +NNZ: 6226538 +Density: 3.885367175883594e-05 +Time: 10.544729232788086 seconds + +[18.11, 17.77, 17.64, 17.87, 17.55, 17.7, 17.81, 17.95, 18.63, 17.7] +[89.36] +14.523812770843506 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5913, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.544729232788086, 'TIME_S_1KI': 1.783312909316436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1297.8479092025757, 'W': 89.36} +[18.11, 17.77, 17.64, 17.87, 17.55, 17.7, 17.81, 17.95, 18.63, 17.7, 18.27, 17.88, 17.72, 17.88, 17.99, 17.97, 17.75, 18.36, 18.01, 17.9] +322.46999999999997 +16.1235 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5913, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'marine1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [400320, 400320], 'MATRIX_ROWS': 400320, 'MATRIX_SIZE': 160256102400, 'MATRIX_NNZ': 6226538, 'MATRIX_DENSITY': 3.885367175883594e-05, 'TIME_S': 10.544729232788086, 'TIME_S_1KI': 1.783312909316436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1297.8479092025757, 'W': 89.36, 'J_1KI': 219.49059854601313, 'W_1KI': 15.112464062235752, 'W_D': 73.2365, 'J_D': 1063.6732139918804, 'W_D_1KI': 12.385675629967869, 'J_D_1KI': 2.094651721624872} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_mario002.json b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_mario002.json new file mode 100644 index 0000000..6bd73ad --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_mario002.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 13717, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.75633430480957, "TIME_S_1KI": 0.7841608445585456, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1299.2889261174203, "W": 89.67, "J_1KI": 94.72107065082892, "W_1KI": 6.537143690311293, "W_D": 73.277, "J_D": 1061.7597260968685, "W_D_1KI": 5.342057301159145, "J_D_1KI": 0.3894479333060542} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_mario002.output b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_mario002.output new file mode 100644 index 0000000..c3613ad --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_mario002.output @@ -0,0 +1,71 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/mario002.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, 
"MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 0.7654633522033691} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.4671, 0.2005, 0.4838, ..., 0.7591, 0.5753, 0.8077]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 0.7654633522033691 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '13717', '-m', 'matrices/389000+_cols/mario002.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "mario002", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [389874, 389874], "MATRIX_ROWS": 389874, "MATRIX_SIZE": 152001735876, "MATRIX_NNZ": 2101242, "MATRIX_DENSITY": 1.3823802655215408e-05, "TIME_S": 10.75633430480957} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.1039, 0.6560, 0.4015, ..., 0.5631, 0.0997, 0.1433]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.75633430480957 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 2101236, + 2101239, 2101242]), + col_indices=tensor([ 0, 1027, 1028, ..., 196606, 233926, + 234127]), + values=tensor([ 1., 0., 0., ..., -1., -1., -1.]), + size=(389874, 389874), nnz=2101242, layout=torch.sparse_csr) +tensor([0.1039, 0.6560, 0.4015, ..., 0.5631, 0.0997, 0.1433]) +Matrix Type: SuiteSparse +Matrix: mario002 +Matrix Format: csr +Shape: torch.Size([389874, 389874]) +Rows: 389874 +Size: 152001735876 +NNZ: 2101242 +Density: 1.3823802655215408e-05 +Time: 10.75633430480957 seconds + +[17.93, 17.78, 18.16, 17.81, 21.75, 18.47, 18.11, 17.7, 18.15, 18.27] +[89.67] +14.489672422409058 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13717, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.75633430480957, 'TIME_S_1KI': 0.7841608445585456, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1299.2889261174203, 'W': 89.67} +[17.93, 17.78, 18.16, 17.81, 21.75, 18.47, 18.11, 17.7, 18.15, 18.27, 22.42, 17.61, 17.63, 17.67, 17.98, 17.5, 17.66, 18.19, 17.57, 17.62] +327.86 +16.393 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 13717, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'mario002', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [389874, 389874], 'MATRIX_ROWS': 389874, 'MATRIX_SIZE': 152001735876, 'MATRIX_NNZ': 2101242, 'MATRIX_DENSITY': 1.3823802655215408e-05, 'TIME_S': 10.75633430480957, 'TIME_S_1KI': 0.7841608445585456, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1299.2889261174203, 'W': 89.67, 'J_1KI': 94.72107065082892, 'W_1KI': 6.537143690311293, 'W_D': 73.277, 'J_D': 1061.7597260968685, 'W_D_1KI': 5.342057301159145, 'J_D_1KI': 0.3894479333060542} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_test1.json b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_test1.json new file mode 100644 index 0000000..54a5eaf --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_test1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1798, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 10.02889084815979, "TIME_S_1KI": 5.577803586295767, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1373.981861114502, "W": 84.07, "J_1KI": 764.1723365486662, "W_1KI": 46.75750834260289, "W_D": 67.9785, "J_D": 1110.9935285568238, "W_D_1KI": 37.80784204671858, "J_D_1KI": 21.027720826873512} diff --git a/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_test1.output b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_test1.output new file mode 100644 index 0000000..1a5d3f5 --- /dev/null +++ b/pytorch/output_389000+_maxcore/xeon_4216_max_csr_10_10_10_test1.output @@ -0,0 +1,74 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1000', '-m', 'matrices/389000+_cols/test1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 5.838165521621704} + 
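The paired commands in each .output (a 1,000-iteration probe followed by a rerun with a matrix-specific count) imply how ITERATIONS is chosen: the driver rescales the probe's count so the timed section lands just over ten seconds, and the observed counts match iterations = int(1000 * 10.5 / probe_time) throughout this diff (e.g. 10.5 / 5.8382 * 1000 = 1798 for test1 above, and the synthetic 1e-05 run below rescales twice because the first attempt finished under 10 s). Below is a hedged sketch of that loop, since the actual driver is not part of this diff; run_spmv and its argument handling are placeholders.

    import json
    import subprocess

    # Placeholder: invoke spmv.py for a given iteration count and parse the
    # timing JSON, which these logs show as the first line of driver output.
    def run_spmv(iterations):
        cmd = ['python3', 'spmv.py', 'suitesparse', 'csr', str(iterations),
               '-m', 'matrices/389000+_cols/test1.mtx']
        out = subprocess.run(cmd, capture_output=True, text=True).stdout
        return json.loads(out.splitlines()[0])['TIME_S']

    # Rescale the iteration count until the timed section runs >= 10 s,
    # aiming slightly past the target (10.5 s) to avoid undershooting.
    def calibrate(target_s=10.5, min_s=10.0):
        iterations, elapsed = 1000, run_spmv(1000)
        while elapsed < min_s:
            iterations = int(iterations * target_s / elapsed)
            elapsed = run_spmv(iterations)
        return iterations, elapsed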
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.7305, 0.5169, 0.5807, ..., 0.4360, 0.1397, 0.7206]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 5.838165521621704 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'suitesparse', 'csr', '1798', '-m', 'matrices/389000+_cols/test1.mtx'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "test1", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [392908, 392908], "MATRIX_ROWS": 392908, "MATRIX_SIZE": 154376696464, "MATRIX_NNZ": 12968200, "MATRIX_DENSITY": 8.400361127706946e-05, "TIME_S": 10.02889084815979} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.9760, 0.7722, 0.6451, ..., 0.4999, 0.6092, 0.1757]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 10.02889084815979 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 48, ..., 12968181, + 12968191, 12968200]), + col_indices=tensor([ 0, 1, 8, ..., 392905, 392906, + 392907]), + values=tensor([1.0000e+00, 0.0000e+00, 0.0000e+00, ..., + 0.0000e+00, 0.0000e+00, 2.1156e-17]), + size=(392908, 392908), nnz=12968200, layout=torch.sparse_csr) +tensor([0.9760, 0.7722, 0.6451, ..., 0.4999, 0.6092, 0.1757]) +Matrix Type: SuiteSparse +Matrix: test1 +Matrix Format: csr +Shape: torch.Size([392908, 392908]) +Rows: 392908 +Size: 154376696464 +NNZ: 12968200 +Density: 8.400361127706946e-05 +Time: 10.02889084815979 seconds + +[17.94, 17.76, 17.81, 17.53, 17.58, 17.64, 18.37, 17.65, 18.76, 17.74] +[84.07] +16.343307495117188 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1798, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 10.02889084815979, 'TIME_S_1KI': 5.577803586295767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1373.981861114502, 'W': 84.07} +[17.94, 17.76, 17.81, 17.53, 17.58, 17.64, 18.37, 17.65, 18.76, 17.74, 18.31, 17.88, 17.67, 17.64, 18.57, 17.92, 17.84, 17.7, 17.61, 17.81] +321.83000000000004 +16.091500000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1798, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'test1', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [392908, 392908], 'MATRIX_ROWS': 392908, 'MATRIX_SIZE': 154376696464, 'MATRIX_NNZ': 12968200, 'MATRIX_DENSITY': 8.400361127706946e-05, 'TIME_S': 10.02889084815979, 'TIME_S_1KI': 5.577803586295767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1373.981861114502, 'W': 84.07, 'J_1KI': 764.1723365486662, 'W_1KI': 46.75750834260289, 'W_D': 67.9785, 'J_D': 1110.9935285568238, 'W_D_1KI': 37.80784204671858, 'J_D_1KI': 21.027720826873512} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json index 8bab18f..980d684 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1770, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.45119595527649, "TIME_S_1KI": 5.904630483207056, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 518.2880183124543, "W": 35.449367856062224, "J_1KI": 292.81808944206455, "W_1KI": 20.027891444102952, "W_D": 16.922367856062227, "J_D": 247.4137349045278, "W_D_1KI": 9.560659805684875, "J_D_1KI": 5.401502715076201} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1755, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.253487825393677, "TIME_S_1KI": 5.842443205352523, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 467.2838370990753, "W": 31.934101368331916, "J_1KI": 266.2585966376497, "W_1KI": 18.196069155744684, "W_D": 16.879101368331916, "J_D": 246.98773149132728, "W_D_1KI": 9.617721577397104, "J_D_1KI": 5.480183234984104} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output index 9bd0023..365e810 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.932083368301392} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.980836629867554} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 10, ..., 999982, - 999993, 1000000]), - col_indices=tensor([37897, 46445, 60989, ..., 76977, 92294, 96477]), - values=tensor([0.9469, 0.5853, 0.3833, ..., 0.6631, 0.6410, 0.8148]), +tensor(crow_indices=tensor([ 0, 11, 22, ..., 999980, + 999989, 1000000]), + col_indices=tensor([ 6100, 13265, 27848, ..., 84407, 91090, 94721]), + values=tensor([0.4400, 0.3445, 0.5606, ..., 0.5861, 0.7102, 0.2795]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0925, 0.0591, 0.1895, ..., 0.1208, 0.2736, 0.9441]) +tensor([0.6757, 0.5029, 0.1898, ..., 0.2612, 0.6123, 0.0844]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 5.932083368301392 seconds +Time: 5.980836629867554 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1770 -ss 100000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.45119595527649} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1755 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.253487825393677} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 15, ..., 999984, - 999991, 1000000]), - col_indices=tensor([ 6148, 23043, 28153, ..., 62723, 86562, 96964]), - values=tensor([0.4836, 0.5090, 0.9509, ..., 0.7452, 0.4499, 0.9407]), +tensor(crow_indices=tensor([ 0, 5, 18, ..., 999983, + 999994, 1000000]), + col_indices=tensor([22305, 51740, 53616, ..., 72974, 76091, 88145]), + values=tensor([0.7756, 0.0657, 0.7358, ..., 0.9841, 0.0331, 0.5251]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2481, 0.7173, 0.6398, ..., 0.9063, 0.5779, 0.5048]) +tensor([0.4750, 0.6821, 0.2847, ..., 0.3502, 0.4038, 0.5877]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.45119595527649 seconds +Time: 10.253487825393677 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 15, ..., 999984, - 999991, 1000000]), - col_indices=tensor([ 6148, 23043, 28153, ..., 62723, 86562, 96964]), - values=tensor([0.4836, 0.5090, 0.9509, ..., 0.7452, 0.4499, 0.9407]), +tensor(crow_indices=tensor([ 0, 5, 18, ..., 999983, + 999994, 1000000]), + col_indices=tensor([22305, 51740, 53616, ..., 72974, 76091, 88145]), + values=tensor([0.7756, 0.0657, 0.7358, ..., 0.9841, 0.0331, 0.5251]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2481, 0.7173, 0.6398, ..., 0.9063, 0.5779, 0.5048]) +tensor([0.4750, 0.6821, 0.2847, ..., 0.3502, 0.4038, 0.5877]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.45119595527649 seconds +Time: 10.253487825393677 seconds -[20.2, 20.56, 20.52, 20.56, 20.56, 20.6, 20.4, 20.64, 20.8, 20.88] -[20.88, 21.44, 21.2, 22.44, 23.6, 27.28, 34.76, 40.76, 46.84, 51.32, 53.0, 52.92, 53.08, 52.84] -14.62051510810852 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1770, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.45119595527649, 'TIME_S_1KI': 5.904630483207056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 518.2880183124543, 'W': 35.449367856062224} -[20.2, 20.56, 20.52, 20.56, 20.56, 20.6, 20.4, 20.64, 20.8, 20.88, 20.56, 20.52, 20.64, 20.64, 20.52, 20.52, 20.6, 20.68, 20.6, 20.72] -370.53999999999996 -18.526999999999997 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1770, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.45119595527649, 'TIME_S_1KI': 5.904630483207056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 518.2880183124543, 'W': 35.449367856062224, 'J_1KI': 292.81808944206455, 'W_1KI': 20.027891444102952, 'W_D': 16.922367856062227, 'J_D': 247.4137349045278, 'W_D_1KI': 9.560659805684875, 'J_D_1KI': 5.401502715076201} +[16.48, 16.16, 16.52, 16.48, 16.48, 16.48, 16.72, 16.72, 16.84, 
17.12] +[17.08, 17.16, 17.96, 19.72, 22.2, 27.16, 34.52, 39.08, 43.72, 45.96, 46.32, 46.32, 46.2, 46.28] +14.632753610610962 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1755, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.253487825393677, 'TIME_S_1KI': 5.842443205352523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 467.2838370990753, 'W': 31.934101368331916} +[16.48, 16.16, 16.52, 16.48, 16.48, 16.48, 16.72, 16.72, 16.84, 17.12, 16.72, 16.8, 16.96, 17.0, 17.04, 16.88, 16.8, 16.88, 16.76, 16.84] +301.1 +15.055000000000001 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1755, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.253487825393677, 'TIME_S_1KI': 5.842443205352523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 467.2838370990753, 'W': 31.934101368331916, 'J_1KI': 266.2585966376497, 'W_1KI': 18.196069155744684, 'W_D': 16.879101368331916, 'J_D': 246.98773149132728, 'W_D_1KI': 9.617721577397104, 'J_D_1KI': 5.480183234984104} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..0a97dfa --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 57.53653693199158, "TIME_S_1KI": 57.53653693199158, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2684.526071929932, "W": 41.311972802980506, "J_1KI": 2684.526071929932, "W_1KI": 41.311972802980506, "W_D": 26.003972802980506, "J_D": 1689.784782156945, "W_D_1KI": 26.003972802980506, "J_D_1KI": 26.003972802980506} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..01b19d4 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 57.53653693199158} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 92, 190, ..., 9999802, + 9999900, 10000000]), + col_indices=tensor([ 766, 3080, 3658, ..., 98863, 99077, 99078]), + values=tensor([0.0329, 0.7493, 0.2063, ..., 0.1215, 0.3807, 0.7288]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3516, 0.3347, 0.9443, ..., 0.8917, 0.2195, 0.8723]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 57.53653693199158 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 92, 190, ..., 9999802, + 9999900, 10000000]), + col_indices=tensor([ 766, 3080, 3658, ..., 98863, 99077, 99078]), + values=tensor([0.0329, 0.7493, 0.2063, ..., 0.1215, 0.3807, 0.7288]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3516, 0.3347, 0.9443, ..., 0.8917, 0.2195, 0.8723]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 57.53653693199158 seconds + +[17.2, 17.2, 16.8, 16.8, 17.0, 16.88, 16.88, 17.0, 17.0, 16.6] +[16.88, 16.84, 17.36, 19.48, 20.16, 21.92, 24.16, 24.88, 27.6, 33.04, 37.56, 43.08, 47.0, 46.96, 47.68, 47.76, 47.76, 47.48, 47.4, 46.92, 47.28, 47.04, 47.56, 48.36, 48.0, 47.68, 47.44, 46.16, 45.68, 46.04, 46.32, 47.44, 47.76, 47.84, 47.64, 47.36, 47.08, 46.96, 46.96, 47.16, 46.68, 46.24, 46.2, 46.44, 46.56, 47.0, 48.08, 48.0, 48.12, 48.44, 48.48, 48.2, 47.64, 47.32, 47.2, 47.2, 47.56, 47.52, 47.68, 47.8, 47.8, 48.0] +64.98179316520691 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 57.53653693199158, 'TIME_S_1KI': 57.53653693199158, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2684.526071929932, 'W': 41.311972802980506} +[17.2, 17.2, 16.8, 16.8, 17.0, 16.88, 16.88, 17.0, 17.0, 16.6, 17.12, 17.12, 17.04, 17.0, 17.0, 16.96, 16.88, 16.92, 17.32, 17.8] +306.16 +15.308000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 57.53653693199158, 'TIME_S_1KI': 57.53653693199158, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2684.526071929932, 'W': 41.311972802980506, 'J_1KI': 2684.526071929932, 'W_1KI': 41.311972802980506, 'W_D': 26.003972802980506, 'J_D': 1689.784782156945, 'W_D_1KI': 26.003972802980506, 'J_D_1KI': 26.003972802980506} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json index f3aace9..9a23625 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ 
-{"CPU": "Altra", "CORES": 16, "ITERATIONS": 11801, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.278456687927246, "TIME_S_1KI": 0.8709818394989616, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 437.5742044067384, "W": 32.19958635623455, "J_1KI": 37.0794173719802, "W_1KI": 2.728547271945983, "W_D": 13.391586356234548, "J_D": 181.9841000671388, "W_D_1KI": 1.1347840315426274, "J_D_1KI": 0.09615998911470446} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 11928, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.779903888702393, "TIME_S_1KI": 0.9037478109240772, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 428.71311614990236, "W": 29.263266081724595, "J_1KI": 35.94174347333186, "W_1KI": 2.4533254595677896, "W_D": 14.015266081724594, "J_D": 205.326650100708, "W_D_1KI": 1.1749887727803985, "J_D_1KI": 0.09850677169520444} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output index d71dde9..0a26541 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.063995361328125} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.8802759647369385} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 99997, 99999, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99998, 100000]), - col_indices=tensor([67343, 31299, 81155, ..., 33224, 88457, 24576]), - values=tensor([0.5842, 0.8218, 0.6188, ..., 0.3932, 0.6826, 0.0146]), + col_indices=tensor([50190, 32056, 73796, ..., 55938, 31334, 37461]), + values=tensor([0.0722, 0.7116, 0.8310, ..., 0.7930, 0.8115, 0.4149]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9733, 0.2979, 0.3395, ..., 0.2786, 0.7488, 0.6423]) +tensor([0.5168, 0.3496, 0.0063, ..., 0.9888, 0.0960, 0.5324]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 1.063995361328125 seconds +Time: 0.8802759647369385 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 9868 -ss 100000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.779469966888428} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 11928 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.779903888702393} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 99997, 99999, +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99995, 99996, 100000]), - col_indices=tensor([14435, 22527, 43950, ..., 8583, 8872, 18967]), - values=tensor([0.6873, 0.0224, 0.4938, ..., 0.6581, 0.7037, 0.6316]), + col_indices=tensor([15079, 22431, 71484, ..., 38240, 57604, 63673]), + values=tensor([0.6856, 0.2309, 0.0261, ..., 0.6883, 0.7108, 0.1151]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2290, 0.1645, 0.1242, ..., 0.3445, 0.2954, 0.7059]) +tensor([0.6131, 0.6051, 0.4027, ..., 0.3545, 0.9505, 0.4978]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 8.779469966888428 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 11801 -ss 100000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.278456687927246} +Time: 10.779903888702393 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99995, 99996, 100000]), - col_indices=tensor([88946, 66534, 50450, ..., 63020, 21924, 98776]), - values=tensor([0.0165, 0.3102, 0.5959, ..., 0.2885, 0.2555, 0.6064]), + col_indices=tensor([15079, 22431, 71484, ..., 38240, 57604, 63673]), + values=tensor([0.6856, 0.2309, 0.0261, ..., 0.6883, 0.7108, 0.1151]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1483, 0.9193, 0.9702, ..., 0.6151, 0.3023, 0.2526]) +tensor([0.6131, 0.6051, 0.4027, ..., 0.3545, 0.9505, 0.4978]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,30 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.278456687927246 seconds +Time: 10.779903888702393 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 100000, 100000, - 100000]), - col_indices=tensor([88946, 66534, 50450, ..., 63020, 21924, 98776]), - values=tensor([0.0165, 0.3102, 0.5959, ..., 0.2885, 0.2555, 0.6064]), - size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1483, 0.9193, 0.9702, ..., 0.6151, 0.3023, 0.2526]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 10.278456687927246 seconds - -[20.52, 20.48, 20.76, 20.96, 20.96, 21.16, 21.32, 21.28, 21.28, 21.2] -[21.36, 21.64, 21.64, 23.32, 23.96, 29.24, 34.28, 39.64, 43.16, 45.96, 45.88, 46.84, 47.12] -13.589435577392578 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.278456687927246, 'TIME_S_1KI': 0.8709818394989616, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 437.5742044067384, 'W': 32.19958635623455} -[20.52, 20.48, 20.76, 20.96, 20.96, 21.16, 21.32, 21.28, 21.28, 21.2, 21.04, 20.92, 20.64, 20.52, 20.52, 20.4, 20.72, 20.96, 21.24, 21.32] -376.16 -18.808 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.278456687927246, 'TIME_S_1KI': 0.8709818394989616, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 437.5742044067384, 'W': 32.19958635623455, 'J_1KI': 37.0794173719802, 'W_1KI': 2.728547271945983, 'W_D': 13.391586356234548, 'J_D': 181.9841000671388, 'W_D_1KI': 1.1347840315426274, 'J_D_1KI': 0.09615998911470446} +[17.04, 17.24, 17.0, 17.0, 16.8, 16.64, 16.64, 16.68, 16.92, 16.84] +[16.64, 16.52, 16.72, 17.8, 20.04, 25.4, 30.88, 34.96, 39.88, 41.96, 42.28, 42.44, 42.56, 42.56] 
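The bracketed lists that close each run are power samples: a short idle baseline and a longer trace captured while the workload runs, with the elapsed sampling window printed after them. The derived JSON fields are consistent with the reductions below, a sketch under assumed definitions (W = J / elapsed, W_D = W − idle) that was checked numerically against the 11928-iteration record:

def summarize(j, elapsed_s, idle_w, time_s, iterations):
    # j: measured energy (joules) over the elapsed_s sampling window;
    # idle_w: baseline wattage from the 20-sample idle list; names are assumptions.
    w = j / elapsed_s                      # mean power while the benchmark ran
    w_d = w - idle_w                       # dynamic power above the idle floor
    j_d = w_d * elapsed_s                  # dynamic energy over the same window
    per_1ki = lambda x: x * 1000.0 / iterations   # *_1KI: per 1000 SpMV iterations
    return {
        "J": j, "W": w, "W_D": w_d, "J_D": j_d,
        "TIME_S_1KI": per_1ki(time_s), "J_1KI": per_1ki(j),
        "W_1KI": per_1ki(w), "W_D_1KI": per_1ki(w_d),
        # The committed J_D_1KI values actually match per_1ki(per_1ki(w_d)), i.e.
        # the 1KI scaling applied twice upstream; per_1ki(j_d) is the literal reading.
        "J_D_1KI": per_1ki(j_d),
    }

For the record above, summarize(428.713, 14.650, 15.248, 10.780, 11928) reproduces W ≈ 29.26, W_D ≈ 14.015, and W_D_1KI ≈ 1.175.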
+14.650214195251465 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11928, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.779903888702393, 'TIME_S_1KI': 0.9037478109240772, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 428.71311614990236, 'W': 29.263266081724595} +[17.04, 17.24, 17.0, 17.0, 16.8, 16.64, 16.64, 16.68, 16.92, 16.84, 16.84, 16.84, 17.0, 17.0, 16.92, 17.0, 17.16, 17.0, 17.16, 17.2] +304.96000000000004 +15.248000000000001 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11928, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.779903888702393, 'TIME_S_1KI': 0.9037478109240772, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 428.71311614990236, 'W': 29.263266081724595, 'J_1KI': 35.94174347333186, 'W_1KI': 2.4533254595677896, 'W_D': 14.015266081724594, 'J_D': 205.326650100708, 'W_D_1KI': 1.1749887727803985, 'J_D_1KI': 0.09850677169520444} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json index ddd2b32..47534f3 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 33464, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.751937627792358, "TIME_S_1KI": 0.321298638172136, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 327.9264199829101, "W": 23.099563679174377, "J_1KI": 9.799379033675296, "W_1KI": 0.6902810088206544, "W_D": 4.345563679174376, "J_D": 61.690565237998875, "W_D_1KI": 0.12985786753449605, "J_D_1KI": 0.0038805243705025113} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 32824, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.612937211990356, "TIME_S_1KI": 0.3233285770165232, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 275.6484677696228, "W": 19.42651909848855, "J_1KI": 8.397771989081855, "W_1KI": 0.5918388709020398, "W_D": 4.498519098488551, "J_D": 63.83078154373167, "W_D_1KI": 0.13704969225227123, "J_D_1KI": 0.004175289186335341} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output index c550e8c..0d66d25 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.358994722366333} +{"MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3622722625732422} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9999, 9999, 10000]), - col_indices=tensor([4769, 2640, 4731, ..., 7727, 9096, 344]), - values=tensor([0.5549, 0.8764, 0.0270, ..., 0.0575, 0.5131, 0.9423]), +tensor(crow_indices=tensor([ 0, 2, 4, ..., 9997, 10000, 10000]), + col_indices=tensor([2430, 5032, 1477, ..., 758, 3153, 4599]), + values=tensor([0.8038, 0.4543, 0.3152, ..., 0.6785, 0.4391, 0.0535]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.2724, 0.3491, 0.1026, ..., 0.4580, 0.8295, 0.5142]) +tensor([0.9594, 0.1900, 0.3074, ..., 0.8950, 0.9459, 0.6732]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.358994722366333 seconds +Time: 0.3622722625732422 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 29248 -ss 10000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.177036046981812} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 28983 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.27123761177063} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9998, 10000]), - col_indices=tensor([8143, 7461, 5162, ..., 7740, 5053, 9684]), - values=tensor([0.7267, 0.3238, 0.0105, ..., 0.5150, 0.5465, 0.0983]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 10000, 10000, 10000]), + col_indices=tensor([1532, 2817, 884, ..., 2356, 6175, 1948]), + values=tensor([0.3809, 0.2852, 0.7235, ..., 0.6592, 0.2563, 0.7726]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.8883, 0.6326, 0.2674, ..., 0.1564, 0.2088, 0.8392]) +tensor([0.6771, 0.1497, 0.5070, ..., 0.8092, 0.9643, 0.7887]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 9.177036046981812 seconds +Time: 9.27123761177063 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 33464 -ss 10000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.751937627792358} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32824 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.612937211990356} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 9994, 9997, 10000]), - col_indices=tensor([1608, 4931, 8613, ..., 2107, 3637, 7054]), - values=tensor([0.4097, 0.1049, 0.8257, ..., 0.2263, 0.1754, 0.1229]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 9999, 10000]), + col_indices=tensor([3350, 4490, 6839, ..., 6784, 8596, 8737]), + values=tensor([0.3991, 0.5600, 0.2439, ..., 0.8859, 0.9485, 0.6345]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9092, 0.1064, 0.7261, ..., 0.1695, 0.8231, 0.3389]) +tensor([0.2861, 0.2741, 0.4038, ..., 0.8389, 0.9796, 0.7969]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.751937627792358 seconds +Time: 10.612937211990356 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 9994, 9997, 10000]), - col_indices=tensor([1608, 4931, 8613, ..., 2107, 3637, 7054]), - values=tensor([0.4097, 0.1049, 0.8257, ..., 0.2263, 0.1754, 0.1229]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 9999, 10000]), + col_indices=tensor([3350, 4490, 6839, ..., 6784, 8596, 8737]), + values=tensor([0.3991, 0.5600, 0.2439, ..., 0.8859, 0.9485, 0.6345]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9092, 0.1064, 0.7261, ..., 0.1695, 0.8231, 0.3389]) +tensor([0.2861, 0.2741, 0.4038, ..., 0.8389, 0.9796, 0.7969]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.751937627792358 seconds +Time: 10.612937211990356 seconds -[20.16, 20.16, 20.16, 20.32, 20.36, 20.88, 21.6, 22.28, 22.28, 22.28] -[21.52, 20.68, 23.48, 24.56, 27.0, 27.0, 27.6, 28.4, 25.44, 25.08, 23.88, 23.84, 23.72, 23.68] -14.196217060089111 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 33464, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.751937627792358, 'TIME_S_1KI': 0.321298638172136, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 327.9264199829101, 'W': 23.099563679174377} -[20.16, 20.16, 20.16, 20.32, 20.36, 20.88, 21.6, 22.28, 22.28, 22.28, 20.28, 20.68, 20.64, 20.84, 20.84, 20.88, 20.6, 20.6, 20.48, 20.24] -375.08000000000004 -18.754 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 33464, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.751937627792358, 'TIME_S_1KI': 0.321298638172136, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 327.9264199829101, 'W': 23.099563679174377, 'J_1KI': 9.799379033675296, 'W_1KI': 0.6902810088206544, 'W_D': 4.345563679174376, 'J_D': 61.690565237998875, 'W_D_1KI': 0.12985786753449605, 'J_D_1KI': 0.0038805243705025113} +[16.64, 16.84, 16.96, 16.88, 16.84, 16.72, 17.08, 16.96, 16.92, 16.92] +[17.08, 16.72, 16.76, 21.08, 22.52, 24.76, 25.6, 23.4, 22.04, 20.32, 20.04, 20.0, 20.0, 20.12] +14.189287662506104 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32824, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.612937211990356, 'TIME_S_1KI': 0.3233285770165232, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 275.6484677696228, 'W': 19.42651909848855} +[16.64, 16.84, 16.96, 16.88, 16.84, 16.72, 17.08, 16.96, 16.92, 16.92, 16.36, 16.04, 15.84, 15.92, 16.12, 16.28, 16.36, 16.68, 16.72, 16.88] +298.56 +14.928 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32824, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.612937211990356, 'TIME_S_1KI': 0.3233285770165232, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 275.6484677696228, 'W': 19.42651909848855, 'J_1KI': 8.397771989081855, 'W_1KI': 0.5918388709020398, 'W_D': 4.498519098488551, 'J_D': 63.83078154373167, 'W_D_1KI': 0.13704969225227123, 'J_D_1KI': 0.004175289186335341} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json index 91eebfb..650d27e 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 4693, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.608984231948853, "TIME_S_1KI": 2.260597535041307, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 333.61959093093867, "W": 23.443356307834602, "J_1KI": 71.08876857680346, "W_1KI": 4.995388090312082, "W_D": 4.929356307834599, "J_D": 70.14907820272437, "W_D_1KI": 1.0503635857307905, "J_D_1KI": 0.223814955408223} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 4599, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.21649432182312, "TIME_S_1KI": 2.2214599525599303, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 276.3690100479126, "W": 19.391688491473598, "J_1KI": 60.09328333287945, "W_1KI": 4.2165010853389, "W_D": 4.4646884914736, "J_D": 63.630433167457575, "W_D_1KI": 0.9707954971675582, "J_D_1KI": 0.21108838816428752} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output index bec4359..7a35653 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.2371175289154053} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.282747268676758} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
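The crow_indices/col_indices/values triples printed in these dumps are standard CSR: row i occupies the half-open slice crow_indices[i]:crow_indices[i+1] of the other two arrays, which is why the leading 0, 15, 26 in the dump below means rows 0 and 1 hold 15 and 11 entries. A pure-Python illustration:

def csr_row(crow_indices, col_indices, values, i):
    # Row i of a CSR matrix occupies the half-open slice [lo, hi) of the
    # column-index and value arrays.
    lo, hi = crow_indices[i], crow_indices[i + 1]
    return list(zip(col_indices[lo:hi], values[lo:hi]))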
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 13, ..., 99981, 99991, +tensor(crow_indices=tensor([ 0, 15, 26, ..., 99983, 99992, 100000]), - col_indices=tensor([ 11, 880, 2486, ..., 7621, 8410, 9572]), - values=tensor([0.7919, 0.7111, 0.9252, ..., 0.0051, 0.9566, 0.6694]), + col_indices=tensor([ 746, 1254, 2691, ..., 5665, 9904, 9986]), + values=tensor([0.7024, 0.2927, 0.8116, ..., 0.2675, 0.5863, 0.1724]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8227, 0.5043, 0.0669, ..., 0.5765, 0.9663, 0.4234]) +tensor([0.2042, 0.3555, 0.3767, ..., 0.6038, 0.4952, 0.0036]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 2.2371175289154053 seconds +Time: 2.282747268676758 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4693 -ss 10000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.608984231948853} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4599 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.21649432182312} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 27, ..., 99982, 99994, +tensor(crow_indices=tensor([ 0, 6, 18, ..., 99975, 99989, 100000]), - col_indices=tensor([ 135, 2132, 2413, ..., 7244, 7277, 8789]), - values=tensor([0.8089, 0.0016, 0.7063, ..., 0.2204, 0.7876, 0.4440]), + col_indices=tensor([5193, 5456, 6247, ..., 5100, 5946, 8330]), + values=tensor([0.7086, 0.0012, 0.4180, ..., 0.5448, 0.8405, 0.8114]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2483, 0.7850, 0.0043, ..., 0.4009, 0.1492, 0.4510]) +tensor([0.0495, 0.0946, 0.7654, ..., 0.8976, 0.3544, 0.9283]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.608984231948853 seconds +Time: 10.21649432182312 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 27, ..., 99982, 99994, +tensor(crow_indices=tensor([ 0, 6, 18, ..., 99975, 99989, 100000]), - col_indices=tensor([ 135, 2132, 2413, ..., 7244, 7277, 8789]), - values=tensor([0.8089, 0.0016, 0.7063, ..., 0.2204, 0.7876, 0.4440]), + col_indices=tensor([5193, 5456, 6247, ..., 5100, 5946, 8330]), + values=tensor([0.7086, 0.0012, 0.4180, ..., 0.5448, 0.8405, 0.8114]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2483, 0.7850, 0.0043, ..., 0.4009, 0.1492, 0.4510]) +tensor([0.0495, 0.0946, 0.7654, ..., 0.8976, 0.3544, 0.9283]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.608984231948853 seconds +Time: 10.21649432182312 seconds -[20.32, 20.32, 20.36, 20.6, 20.68, 20.44, 20.64, 20.8, 20.88, 20.84] -[20.84, 20.52, 23.32, 24.96, 27.48, 27.48, 28.36, 28.96, 25.92, 25.2, 24.36, 24.56, 24.48, 24.08] -14.23088002204895 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.608984231948853, 'TIME_S_1KI': 2.260597535041307, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 333.61959093093867, 'W': 23.443356307834602} -[20.32, 20.32, 20.36, 20.6, 20.68, 20.44, 20.64, 20.8, 20.88, 20.84, 20.68, 20.8, 20.52, 20.64, 20.64, 20.68, 20.4, 20.48, 20.36, 20.24] -370.28000000000003 -18.514000000000003 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.608984231948853, 'TIME_S_1KI': 2.260597535041307, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 333.61959093093867, 'W': 23.443356307834602, 'J_1KI': 71.08876857680346, 'W_1KI': 4.995388090312082, 'W_D': 4.929356307834599, 'J_D': 70.14907820272437, 'W_D_1KI': 1.0503635857307905, 'J_D_1KI': 0.223814955408223} +[16.44, 16.28, 16.32, 16.56, 16.56, 16.6, 16.76, 16.76, 16.72, 16.68] +[16.52, 16.48, 16.6, 20.0, 22.08, 24.8, 25.56, 23.6, 23.04, 20.28, 20.28, 20.04, 20.16, 20.2] +14.251931190490723 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4599, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.21649432182312, 'TIME_S_1KI': 2.2214599525599303, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.3690100479126, 'W': 19.391688491473598} +[16.44, 16.28, 16.32, 16.56, 16.56, 16.6, 16.76, 16.76, 16.72, 16.68, 16.4, 16.48, 16.68, 16.36, 16.44, 16.64, 16.64, 16.8, 16.8, 16.76] +298.53999999999996 +14.926999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4599, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.21649432182312, 'TIME_S_1KI': 2.2214599525599303, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.3690100479126, 'W': 19.391688491473598, 'J_1KI': 60.09328333287945, 'W_1KI': 4.2165010853389, 'W_D': 4.4646884914736, 'J_D': 63.630433167457575, 'W_D_1KI': 0.9707954971675582, 'J_D_1KI': 0.21108838816428752} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json index dd61d28..d799afd 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.223905086517334, "TIME_S_1KI": 21.223905086517334, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 606.5871645927429, "W": 23.902485146880146, "J_1KI": 606.5871645927429, "W_1KI": 23.902485146880146, "W_D": 5.469485146880146, "J_D": 138.80228213262555, "W_D_1KI": 5.469485146880146, "J_D_1KI": 5.469485146880146} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.366477489471436, "TIME_S_1KI": 21.366477489471436, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 489.4337509441374, "W": 19.31282985940674, "J_1KI": 489.4337509441374, "W_1KI": 19.31282985940674, "W_D": 4.539829859406739, "J_D": 115.05025275492645, "W_D_1KI": 4.539829859406739, "J_D_1KI": 4.539829859406739} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output index dbf2821..7e1dd33 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.01 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.223905086517334} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.366477489471436} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
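Every run is echoed as a Python argv list before its output: the benchmark is containerized under apptainer, OpenMP threads are pinned (OMP_PROC_BIND=true, OMP_PLACES={0:16}), and numactl binds both CPU and memory to node 0 so the 16-core measurements are not perturbed by thread or page migration. A sketch of issuing one such run; the argv mirrors the logs, while wrapping it in a function is an assumption:

import subprocess

def launch(iterations, size, density, cores=16):
    inner = ("numactl --cpunodebind=0 --membind=0 "
             f"python3 spmv.py synthetic csr {iterations} "
             f"-ss {size} -sd {density} -c {cores}")
    cmd = ["apptainer", "run",
           "--env", "OMP_PROC_BIND=true",         # keep OpenMP threads where placed
           "--env", f"OMP_PLACES={{0:{cores}}}",  # the first 16 hardware places
           "pytorch-altra.sif", "-c", inner]
    return subprocess.run(cmd, capture_output=True, text=True)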
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 100, 193, ..., 999807, - 999898, 1000000]), - col_indices=tensor([ 45, 67, 78, ..., 9873, 9905, 9941]), - values=tensor([0.2793, 0.5501, 0.9236, ..., 0.0106, 0.8963, 0.7259]), +tensor(crow_indices=tensor([ 0, 93, 201, ..., 999801, + 999899, 1000000]), + col_indices=tensor([ 106, 113, 159, ..., 9934, 9937, 9966]), + values=tensor([0.1214, 0.4144, 0.1866, ..., 0.5194, 0.7412, 0.0565]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2312, 0.2281, 0.2895, ..., 0.4123, 0.5947, 0.5960]) +tensor([0.4749, 0.5757, 0.5717, ..., 0.5026, 0.5396, 0.1085]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 21.223905086517334 seconds +Time: 21.366477489471436 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 100, 193, ..., 999807, - 999898, 1000000]), - col_indices=tensor([ 45, 67, 78, ..., 9873, 9905, 9941]), - values=tensor([0.2793, 0.5501, 0.9236, ..., 0.0106, 0.8963, 0.7259]), +tensor(crow_indices=tensor([ 0, 93, 201, ..., 999801, + 999899, 1000000]), + col_indices=tensor([ 106, 113, 159, ..., 9934, 9937, 9966]), + values=tensor([0.1214, 0.4144, 0.1866, ..., 0.5194, 0.7412, 0.0565]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2312, 0.2281, 0.2895, ..., 0.4123, 0.5947, 0.5960]) +tensor([0.4749, 0.5757, 0.5717, ..., 0.5026, 0.5396, 0.1085]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 21.223905086517334 seconds +Time: 21.366477489471436 seconds -[20.16, 20.16, 20.16, 20.04, 20.28, 20.72, 20.6, 20.64, 20.6, 20.44] -[20.44, 20.64, 23.68, 24.76, 27.96, 27.96, 29.28, 30.08, 27.32, 27.04, 23.96, 23.92, 23.72, 23.6, 23.72, 23.92, 24.08, 24.24, 24.24, 24.36, 24.24, 24.12, 24.4, 23.96, 24.12] -25.377577304840088 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.223905086517334, 'TIME_S_1KI': 21.223905086517334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 606.5871645927429, 'W': 23.902485146880146} -[20.16, 20.16, 20.16, 20.04, 20.28, 20.72, 20.6, 20.64, 20.6, 20.44, 20.2, 20.32, 20.32, 20.52, 20.52, 20.8, 20.8, 20.72, 20.68, 20.76] -368.65999999999997 -18.433 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.223905086517334, 'TIME_S_1KI': 21.223905086517334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 606.5871645927429, 'W': 23.902485146880146, 'J_1KI': 606.5871645927429, 'W_1KI': 23.902485146880146, 'W_D': 5.469485146880146, 'J_D': 138.80228213262555, 'W_D_1KI': 5.469485146880146, 'J_D_1KI': 5.469485146880146} +[16.76, 16.8, 16.72, 16.6, 16.36, 16.12, 16.0, 
16.04, 16.28, 16.36] +[16.48, 16.48, 16.36, 17.76, 18.4, 22.04, 22.96, 22.96, 22.68, 21.88, 20.28, 20.28, 20.36, 20.0, 20.0, 19.8, 19.72, 19.84, 19.96, 20.12, 20.32, 20.36, 20.56, 20.72, 20.6] +25.34241509437561 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.366477489471436, 'TIME_S_1KI': 21.366477489471436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 489.4337509441374, 'W': 19.31282985940674} +[16.76, 16.8, 16.72, 16.6, 16.36, 16.12, 16.0, 16.04, 16.28, 16.36, 16.6, 16.56, 16.28, 16.28, 16.28, 16.24, 16.56, 16.68, 16.52, 16.56] +295.46000000000004 +14.773000000000001 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.366477489471436, 'TIME_S_1KI': 21.366477489471436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 489.4337509441374, 'W': 19.31282985940674, 'J_1KI': 489.4337509441374, 'W_1KI': 19.31282985940674, 'W_D': 4.539829859406739, 'J_D': 115.05025275492645, 'W_D_1KI': 4.539829859406739, 'J_D_1KI': 4.539829859406739} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json index af8ffc0..fda3205 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 112.2527105808258, "TIME_S_1KI": 112.2527105808258, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2847.1031341934195, "W": 24.02975891792854, "J_1KI": 2847.1031341934195, "W_1KI": 24.02975891792854, "W_D": 5.456758917928539, "J_D": 646.5298079283226, "W_D_1KI": 5.456758917928539, "J_D_1KI": 5.456758917928539} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.60694670677185, "TIME_S_1KI": 106.60694670677185, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2300.291395263673, "W": 20.29267619003776, "J_1KI": 2300.291395263673, "W_1KI": 20.29267619003776, "W_D": 5.365676190037762, "J_D": 608.2302134094255, "W_D_1KI": 5.365676190037762, "J_D_1KI": 5.365676190037762} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output index d49413b..b832c55 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 
10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 112.2527105808258} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.60694670677185} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 493, 999, ..., 4999078, - 4999538, 5000000]), - col_indices=tensor([ 9, 32, 79, ..., 9948, 9954, 9975]), - values=tensor([0.7230, 0.3394, 0.4856, ..., 0.5860, 0.3031, 0.1676]), +tensor(crow_indices=tensor([ 0, 513, 983, ..., 4998990, + 4999536, 5000000]), + col_indices=tensor([ 3, 6, 54, ..., 9902, 9976, 9979]), + values=tensor([0.3821, 0.3276, 0.4096, ..., 0.9878, 0.3843, 0.9439]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5227, 0.7065, 0.1059, ..., 0.0574, 0.9985, 0.1783]) +tensor([0.8065, 0.5635, 0.0733, ..., 0.7202, 0.3714, 0.0072]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 112.2527105808258 seconds +Time: 106.60694670677185 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 493, 999, ..., 4999078, - 4999538, 5000000]), - col_indices=tensor([ 9, 32, 79, ..., 9948, 9954, 9975]), - values=tensor([0.7230, 0.3394, 0.4856, ..., 0.5860, 0.3031, 0.1676]), +tensor(crow_indices=tensor([ 0, 513, 983, ..., 4998990, + 4999536, 5000000]), + col_indices=tensor([ 3, 6, 54, ..., 9902, 9976, 9979]), + values=tensor([0.3821, 0.3276, 0.4096, ..., 0.9878, 0.3843, 0.9439]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5227, 0.7065, 0.1059, ..., 0.0574, 0.9985, 0.1783]) +tensor([0.8065, 0.5635, 0.0733, ..., 0.7202, 0.3714, 0.0072]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 112.2527105808258 seconds +Time: 106.60694670677185 seconds -[20.36, 20.76, 20.76, 20.64, 20.76, 20.64, 20.44, 20.2, 20.64, 20.52] -[20.84, 20.72, 21.32, 21.96, 24.12, 27.04, 27.04, 28.68, 28.56, 28.24, 25.72, 24.44, 24.36, 24.24, 24.24, 24.56, 24.28, 24.4, 24.4, 24.44, 24.56, 24.2, 24.24, 24.04, 24.28, 24.12, 24.12, 24.28, 24.32, 24.24, 24.56, 24.56, 24.6, 24.44, 24.6, 24.6, 24.44, 24.44, 24.44, 24.4, 24.36, 24.36, 24.28, 24.28, 24.32, 24.24, 24.28, 24.08, 24.04, 24.04, 24.2, 24.24, 24.32, 24.6, 24.68, 24.36, 24.36, 24.28, 24.24, 24.08, 24.24, 24.32, 24.36, 24.6, 24.6, 24.64, 24.68, 24.6, 24.6, 24.4, 24.28, 24.4, 24.4, 24.2, 24.32, 24.36, 24.4, 24.44, 24.56, 24.44, 24.44, 24.4, 24.28, 24.4, 24.56, 24.56, 24.64, 24.76, 24.68, 24.44, 24.44, 24.36, 24.32, 24.32, 24.16, 24.24, 24.2, 24.12, 23.8, 23.88, 23.88, 23.76, 24.08, 24.24, 24.4, 24.4, 24.6, 24.52, 24.4, 24.56, 24.48, 24.4, 24.68, 24.72, 24.68, 24.8, 24.8] -118.48238444328308 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 112.2527105808258, 'TIME_S_1KI': 112.2527105808258, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2847.1031341934195, 'W': 24.02975891792854} -[20.36, 20.76, 20.76, 20.64, 20.76, 20.64, 20.44, 20.2, 20.64, 20.52, 20.52, 20.56, 20.56, 20.56, 20.8, 20.88, 20.8, 20.8, 20.68, 20.56] -371.46000000000004 -18.573 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 112.2527105808258, 'TIME_S_1KI': 112.2527105808258, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2847.1031341934195, 'W': 24.02975891792854, 'J_1KI': 2847.1031341934195, 'W_1KI': 24.02975891792854, 'W_D': 5.456758917928539, 'J_D': 646.5298079283226, 'W_D_1KI': 5.456758917928539, 'J_D_1KI': 5.456758917928539} +[16.56, 16.36, 16.32, 16.32, 16.44, 16.44, 16.32, 16.32, 16.32, 16.04] +[16.0, 16.2, 16.52, 20.8, 21.96, 24.64, 26.28, 24.4, 23.76, 23.12, 21.52, 21.52, 20.64, 20.52, 20.48, 20.32, 20.28, 20.28, 20.28, 20.6, 20.68, 20.88, 20.8, 20.8, 20.64, 20.6, 20.6, 20.4, 20.32, 20.48, 20.32, 20.16, 20.32, 20.36, 20.24, 20.4, 20.4, 20.56, 20.48, 20.48, 20.84, 20.92, 20.8, 20.68, 20.48, 20.44, 20.28, 20.68, 20.68, 20.56, 20.52, 20.4, 20.24, 20.28, 20.32, 20.32, 20.56, 20.6, 20.56, 20.76, 21.0, 21.0, 21.0, 21.04, 21.0, 20.8, 20.56, 20.4, 20.32, 20.24, 20.32, 20.72, 20.68, 20.68, 20.84, 20.8, 20.56, 20.56, 20.72, 20.8, 20.72, 20.92, 20.92, 20.88, 20.92, 20.92, 20.88, 20.88, 20.68, 20.32, 
20.12, 20.08, 20.12, 20.4, 20.48, 20.56, 20.64, 20.52, 20.52, 20.4, 20.32, 20.28, 20.24, 20.24, 20.36, 20.52, 20.32, 20.32, 20.44, 20.44, 20.44, 20.44] +113.35574340820312 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.60694670677185, 'TIME_S_1KI': 106.60694670677185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2300.291395263673, 'W': 20.29267619003776} +[16.56, 16.36, 16.32, 16.32, 16.44, 16.44, 16.32, 16.32, 16.32, 16.04, 16.52, 16.72, 16.8, 16.96, 17.08, 17.12, 17.0, 16.64, 16.52, 16.6] +298.53999999999996 +14.926999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.60694670677185, 'TIME_S_1KI': 106.60694670677185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2300.291395263673, 'W': 20.29267619003776, 'J_1KI': 2300.291395263673, 'W_1KI': 20.29267619003776, 'W_D': 5.365676190037762, 'J_D': 608.2302134094255, 'W_D_1KI': 5.365676190037762, 'J_D_1KI': 5.365676190037762} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..ce0e17e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 215.95656299591064, "TIME_S_1KI": 215.95656299591064, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4458.621953725812, "W": 20.314604170190528, "J_1KI": 4458.621953725812, "W_1KI": 20.314604170190528, "W_D": 5.2026041701905275, "J_D": 1141.8605538864108, "W_D_1KI": 5.2026041701905275, "J_D_1KI": 5.2026041701905275} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..3241974 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 215.95656299591064} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
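Note that the three densest settings (0.01, 0.05, and this new 0.1 record) all stay at ITERATIONS = 1000: their initial probe already exceeds the 10-second budget, so the calibration loop sketched earlier never rescales. A quick arithmetic check, with TIME_S values copied from the records above:

# At 1000 iterations, total seconds double as milliseconds per SpMV.
for density, probe_s in [(0.01, 21.37), (0.05, 106.61), (0.1, 215.96)]:
    assert probe_s >= 10.0      # over budget on the first pass: no rescale
    print(f"density {density}: ~{probe_s:.0f} ms per SpMV, ITERATIONS stays at 1000")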
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 987, 1996, ..., 9998019, + 9999013, 10000000]), + col_indices=tensor([ 25, 29, 35, ..., 9989, 9993, 9996]), + values=tensor([0.8438, 0.2270, 0.6737, ..., 0.5218, 0.6879, 0.5182]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6846, 0.7185, 0.0206, ..., 0.2576, 0.7966, 0.0945]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 215.95656299591064 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 987, 1996, ..., 9998019, + 9999013, 10000000]), + col_indices=tensor([ 25, 29, 35, ..., 9989, 9993, 9996]), + values=tensor([0.8438, 0.2270, 0.6737, ..., 0.5218, 0.6879, 0.5182]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6846, 0.7185, 0.0206, ..., 0.2576, 0.7966, 0.0945]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 215.95656299591064 seconds + +[16.56, 16.44, 16.64, 16.6, 16.56, 16.92, 16.84, 16.92, 16.96, 16.72] +[16.52, 16.4, 17.44, 17.44, 19.24, 21.04, 24.24, 25.48, 25.32, 24.52, 22.6, 22.16, 20.6, 20.6, 21.08, 21.08, 20.92, 21.04, 20.88, 20.64, 20.6, 20.72, 20.52, 20.76, 20.76, 20.52, 20.44, 20.28, 20.28, 20.28, 20.44, 20.68, 20.72, 20.96, 20.68, 20.72, 20.48, 20.4, 20.2, 20.48, 20.48, 20.24, 20.28, 20.28, 20.2, 20.04, 20.16, 20.28, 20.4, 20.56, 20.6, 20.52, 20.56, 20.56, 20.6, 20.64, 20.72, 20.52, 20.4, 20.28, 20.44, 20.56, 20.52, 20.6, 20.56, 20.56, 20.4, 20.48, 20.28, 20.24, 20.36, 20.44, 20.48, 20.52, 20.36, 20.44, 20.36, 20.36, 20.32, 20.52, 20.52, 20.64, 20.56, 20.52, 20.56, 20.64, 20.36, 20.64, 20.64, 20.72, 20.72, 20.64, 20.8, 20.52, 20.36, 20.32, 20.44, 20.4, 20.56, 20.8, 21.08, 20.84, 20.84, 20.84, 20.76, 20.4, 20.36, 20.48, 20.6, 20.56, 20.76, 20.64, 20.68, 20.72, 20.72, 20.56, 20.56, 20.56, 20.8, 20.6, 20.56, 20.44, 20.44, 20.28, 20.48, 20.56, 20.72, 20.72, 20.56, 20.72, 20.76, 20.68, 20.72, 20.6, 20.64, 20.76, 20.88, 21.08, 20.96, 20.96, 20.72, 20.64, 20.52, 20.44, 20.32, 20.48, 20.6, 20.56, 20.6, 20.84, 20.68, 20.68, 20.64, 20.64, 20.6, 20.44, 20.28, 20.4, 20.16, 20.52, 20.76, 20.92, 20.96, 20.68, 20.68, 20.64, 20.24, 20.16, 20.36, 20.56, 20.6, 20.72, 20.48, 20.48, 20.4, 20.24, 20.24, 20.32, 20.44, 20.28, 20.64, 20.8, 20.88, 21.0, 21.16, 20.76, 20.68, 20.4, 20.4, 20.48, 20.48, 20.52, 20.6, 20.56, 20.32, 20.2, 20.04, 20.04, 20.16, 20.36, 20.4, 20.4, 20.4, 20.4, 20.28, 20.36, 20.4, 20.52, 20.8, 21.04, 21.2, 21.04, 20.72, 20.72, 20.6, 20.6, 20.52] +219.47865271568298 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 215.95656299591064, 'TIME_S_1KI': 215.95656299591064, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4458.621953725812, 'W': 20.314604170190528} +[16.56, 16.44, 16.64, 16.6, 16.56, 16.92, 16.84, 16.92, 16.96, 16.72, 16.8, 17.08, 16.92, 
17.2, 17.12, 16.88, 16.68, 16.52, 16.6, 16.64] +302.24 +15.112 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 215.95656299591064, 'TIME_S_1KI': 215.95656299591064, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4458.621953725812, 'W': 20.314604170190528, 'J_1KI': 4458.621953725812, 'W_1KI': 20.314604170190528, 'W_D': 5.2026041701905275, 'J_D': 1141.8605538864108, 'W_D_1KI': 5.2026041701905275, 'J_D_1KI': 5.2026041701905275} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json index b09b822..7258aa7 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 141369, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.808244943618774, "TIME_S_1KI": 0.0764541373541496, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 314.87872554779057, "W": 22.096174468711904, "J_1KI": 2.2273534194044706, "W_1KI": 0.15630141310125914, "W_D": 3.7551744687119033, "J_D": 53.51263643360139, "W_D_1KI": 0.02656292729461129, "J_D_1KI": 0.00018789782268114857} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 141920, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.197850227355957, "TIME_S_1KI": 0.0718563291104563, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 278.53162281036373, "W": 19.575620643059622, "J_1KI": 1.9625959893627658, "W_1KI": 0.1379341928062262, "W_D": 4.584620643059623, "J_D": 65.2322524514198, "W_D_1KI": 0.032304260449969154, "J_D_1KI": 0.00022762303022808028} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output index ea3d934..5a62a56 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,266 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.09768295288085938} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08288788795471191} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    1,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([9792, 3011, 8315,  ..., 9915, 2192, 5818]),
-       values=tensor([5.0866e-01, 8.2556e-01, 1.6629e-01,  ..., 2.1892e-01, 9.5993e-02]),
+tensor(crow_indices=tensor([   0,    0,    0,  ...,  999, 1000, 1000]),
+       col_indices=tensor([8276, 8512, 1857,  ..., 5176, 8465]),
+       values=tensor([0.1245, 0.3818, 0.2094,  ..., 0.3612, 0.1545]),
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.1182, 0.0531, 0.2619,  ..., 0.1829, 0.3048, 0.7863])
+tensor([0.4940, 0.1637, 0.9163,  ..., 0.2332, 0.8903, 0.4879])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -375,757 +268,271 @@
 Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 0.09768295288085938 seconds
+Time: 0.08288788795471191 seconds
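The synthetic runs in this file construct a 10000 x 10000 random matrix at density 1e-05, so nnz = int(10000 * 10000 * 1e-05) = 1000, convert it to CSR (the step that fires the beta-state UserWarning), and time repeated sparse matrix-vector products against a random dense vector. A minimal sketch of that setup, assuming only public PyTorch APIs; this is an illustration, not the repository's spmv.py:

import torch

rows = cols = 10000
density = 1e-05
nnz = int(rows * cols * density)  # 10000 * 10000 * 1e-05 = 1000 nonzeros

# Random COO coordinates and values. randint can repeat a coordinate, in
# which case coalesce() merges it and the realized nnz dips below 1000.
indices = torch.randint(0, rows, (2, nnz))
values = torch.rand(nnz)
matrix = torch.sparse_coo_tensor(indices, values, (rows, cols)).coalesce()

# The CSR conversion that triggers the beta-state UserWarning in the logs.
matrix = matrix.to_sparse_csr().type(torch.float32)

vector = torch.rand(cols)  # the dense operand printed after each matrix dump
result = matrix @ vector   # one SpMV iteration of the timed loop
print(matrix)              # prints crow_indices / col_indices / values as above

The dumps here all report exactly nnz=1000, so the real generator presumably avoids duplicate coordinates; the sketch above does not guarantee that.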
-
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 107490 -ss 10000 -sd 1e-05 -c 16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.983633518218994}
-
-/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
-  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    1,  ...,  999, 1000, 1000]),
-       col_indices=tensor([9808, 7333, 2761,  ..., 5852, 7388, 2994]),
-       values=tensor([7.8719e-01, 2.7798e-01, 6.8950e-02,  ..., 2.7676e-01, 6.2277e-01]),
-       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.8535, 0.9437, 0.0129,  ..., 0.9247, 0.1049, 0.8510])
-Matrix Type: synthetic
-Matrix Format: csr
-Shape: torch.Size([10000, 10000])
-Rows: 10000
-Size: 100000000
-NNZ: 1000
-Density: 1e-05
-Time: 7.983633518218994 seconds
-
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 141369 -ss 10000 -sd 1e-05 -c 16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.808244943618774}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 126677 -ss 10000 -sd 1e-05 -c 16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.372220277786255}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
   matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([7315, 1858, 1670,  ..., 1755, 6122,  877]),
-       values=tensor([4.5503e-01, 5.5236e-01, 7.4946e-01,  ..., 2.0267e-01, 4.7561e-01,
3.3676e-01, 8.5537e-01, - 2.8565e-01, 3.5750e-01, 6.0110e-01, 4.8385e-01, - 6.7175e-01, 2.8373e-02, 2.9667e-01, 1.5980e-01, - 6.0663e-01, 5.6893e-01, 8.6187e-01, 3.8426e-01, - 9.4313e-01, 1.5700e-01, 6.4374e-01, 6.7882e-01, - 8.1698e-02, 8.7238e-01, 6.1915e-01, 2.4451e-01, - 8.6242e-01, 6.4213e-01, 9.1978e-01, 5.7440e-01, - 6.5066e-02, 1.3295e-01, 6.8781e-01, 6.0675e-01, - 2.5313e-01, 2.2555e-02, 1.8691e-01, 8.1480e-01, - 8.6104e-01, 2.0666e-01, 2.1914e-01, 6.0359e-01, - 5.1611e-01, 5.6270e-01, 2.2858e-01, 7.0563e-02, - 7.3900e-02, 9.0084e-01, 4.8907e-01, 2.6475e-01, - 7.2082e-01, 6.8751e-01, 1.3829e-01, 9.7521e-02, - 9.9126e-01, 6.0189e-01, 7.9516e-01, 2.1625e-01, - 5.7626e-01, 1.6852e-01, 9.5043e-01, 3.1655e-01, - 2.5323e-01, 5.6154e-01, 5.6857e-01, 5.2077e-01, - 9.3389e-01, 8.0069e-01, 1.4968e-01, 8.4212e-01, - 3.1001e-01, 2.2924e-01, 6.2681e-01, 2.6673e-01, - 9.8161e-01, 4.5832e-01, 3.3629e-01, 6.6850e-01, - 7.7463e-01, 6.5327e-02, 8.0080e-01, 4.1266e-01, - 7.2843e-01, 9.0381e-02, 5.7561e-01, 7.3344e-01]), + col_indices=tensor([4171, 3057, 7209, 2948, 4077, 8602, 6723, 7385, 3185, + 1979, 4898, 5146, 6082, 3916, 3357, 1239, 1865, 5288, + 7084, 3669, 8567, 390, 222, 7937, 7436, 7536, 100, + 9939, 953, 6752, 4459, 1164, 7573, 5755, 4330, 1051, + 6926, 4784, 6452, 173, 7327, 2769, 9411, 547, 7184, + 1803, 2735, 3553, 2593, 8603, 9787, 409, 5628, 6192, + 968, 7416, 6382, 1295, 829, 5935, 257, 823, 2011, + 1692, 9205, 9876, 7722, 8228, 3769, 5411, 4265, 6838, + 3788, 5377, 1539, 121, 7640, 6556, 2886, 373, 633, + 3388, 2063, 2691, 1816, 4562, 4715, 1544, 5746, 4224, + 9753, 3302, 257, 5030, 1700, 5572, 5488, 8810, 44, + 3892, 6196, 680, 9235, 3771, 3078, 9652, 8313, 8340, + 8194, 9811, 6858, 7697, 1051, 6425, 3442, 9895, 8537, + 6956, 3173, 2655, 5724, 9506, 1362, 5282, 7899, 4034, + 717, 6956, 2074, 8234, 9231, 8400, 1721, 8096, 8058, + 1564, 2837, 5525, 6467, 9105, 8508, 2425, 8050, 3104, + 4383, 807, 8898, 4439, 9565, 3772, 5003, 435, 7644, + 1652, 1914, 2238, 1718, 8446, 8758, 3283, 6522, 4570, + 7929, 5444, 633, 5009, 5898, 6412, 6138, 7641, 8023, + 3779, 6596, 1843, 4611, 5333, 3629, 1447, 5379, 4896, + 2089, 9462, 7859, 9196, 1537, 6884, 3472, 9048, 9935, + 3380, 3192, 8628, 3425, 8179, 6368, 2038, 9136, 8061, + 4989, 8687, 552, 2271, 3149, 9135, 582, 9927, 9097, + 2493, 9502, 7584, 4043, 4133, 6860, 691, 8767, 6168, + 9739, 5803, 3262, 4083, 3153, 7197, 5661, 4584, 5627, + 5286, 6261, 3581, 5434, 706, 8759, 9491, 8376, 5759, + 3936, 7006, 5446, 611, 6403, 7493, 2501, 344, 9152, + 150, 9135, 7259, 2392, 2575, 860, 4372, 1162, 1538, + 2024, 4841, 4854, 4474, 8570, 9195, 6099, 1575, 7604, + 2154, 4244, 6565, 6969, 3326, 3755, 4895, 8572, 6142, + 2184, 8551, 118, 8884, 795, 1091, 9777, 3962, 6775, + 5235, 8730, 9354, 5232, 6956, 7770, 9887, 4021, 5102, + 7542, 7149, 8658, 8721, 993, 4855, 8263, 7566, 4198, + 9643, 9042, 1412, 6406, 201, 8489, 2307, 9064, 2555, + 9663, 9159, 2978, 360, 4929, 5940, 6745, 3315, 7591, + 5668, 6700, 5879, 7321, 7180, 3279, 4488, 8581, 958, + 7425, 7701, 2088, 1418, 8898, 556, 6955, 8597, 1909, + 2715, 6282, 9823, 3990, 1424, 2696, 2113, 1383, 7011, + 3430, 8570, 4503, 3770, 1789, 3311, 9299, 9464, 6616, + 8873, 1240, 2624, 4370, 5836, 8559, 7737, 358, 9586, + 7848, 2968, 7333, 9753, 5901, 7252, 4963, 3089, 4167, + 474, 858, 6778, 1967, 4227, 9344, 2784, 6491, 1519, + 5597, 6764, 523, 1385, 6925, 2974, 4181, 3929, 5908, + 6560, 1590, 1699, 7323, 6440, 7896, 46, 3088, 6798, + 9850, 5931, 1626, 7674, 4915, 5518, 7943, 1927, 1848, + 9685, 9157, 2844, 4061, 1680, 
534, 7745, 4726, 1978, + 6642, 1627, 1780, 6356, 796, 3636, 2194, 1594, 4485, + 5547, 5732, 991, 7922, 4644, 5251, 8603, 8177, 3055, + 3092, 864, 1671, 625, 6335, 872, 6033, 3915, 9704, + 2838, 5280, 6839, 1107, 8192, 9854, 10, 7358, 5729, + 7672, 9018, 7999, 9886, 3903, 332, 6039, 2718, 486, + 9338, 4888, 642, 2641, 8493, 9074, 7037, 4121, 2857, + 7670, 437, 3150, 3472, 6363, 1462, 9423, 2752, 9604, + 6126, 1966, 967, 6018, 6151, 6956, 9567, 1377, 2627, + 3339, 7453, 8116, 8505, 1963, 320, 1354, 8233, 2890, + 4712, 5923, 7507, 1168, 4122, 4404, 7181, 1991, 2191, + 8275, 7285, 2794, 9311, 4057, 4194, 9595, 7009, 5312, + 7563, 3003, 262, 4441, 3726, 4811, 590, 214, 3678, + 4769, 7922, 9858, 9078, 4474, 5472, 2117, 9957, 4905, + 4210, 9118, 2443, 4337, 559, 429, 5091, 7411, 2322, + 2740, 970, 7099, 7791, 9424, 2375, 6748, 5364, 3376, + 759, 6765, 6563, 7009, 5917, 8033, 103, 2556, 9654, + 7379, 1825, 1993, 7549, 4098, 3695, 8920, 8903, 9769, + 1722, 6589, 8831, 183, 6202, 1828, 7236, 5919, 4066, + 7989, 8517, 9644, 5713, 6870, 1471, 3063, 5308, 1800, + 4028, 398, 6076, 49, 5584, 6701, 874, 7580, 7109, + 7567, 7489, 165, 216, 2596, 2574, 8422, 5803, 4359, + 7017, 1094, 5107, 6174, 3445, 6851, 9183, 7690, 4452, + 8138, 4660, 3624, 4709, 9349, 5515, 101, 6164, 8143, + 1521, 1313, 1392, 4319, 521, 2718, 7242, 2894, 8876, + 7894, 8783, 5732, 5611, 8956, 4200, 5663, 8786, 4299, + 9359, 3300, 7300, 262, 8236, 3471, 8269, 3654, 7419, + 6, 3744, 8175, 6445, 7372, 7446, 9800, 761, 6696, + 415, 9859, 8573, 6158, 5109, 3984, 7163, 2377, 9369, + 7478, 5206, 1692, 4134, 7467, 9029, 431, 562, 4095, + 1381, 2986, 3374, 522, 4253, 4868, 5790, 8736, 7209, + 1524, 9247, 4314, 1512, 3044, 7263, 7101, 7578, 3761, + 9421, 4055, 8908, 8447, 6895, 3233, 6594, 7225, 8934, + 1620, 8931, 6629, 1371, 1481, 9971, 8892, 8055, 6339, + 2626, 3951, 2540, 7730, 8105, 1818, 9698, 6348, 8238, + 8909, 92, 9679, 5649, 6994, 8423, 9617, 8893, 7878, + 139, 4894, 3194, 6365, 3139, 8540, 8124, 8207, 7468, + 3137, 9027, 4981, 9129, 8139, 7731, 869, 9157, 8972, + 754, 7007, 3578, 658, 5120, 8944, 8958, 1363, 7546, + 5022, 8634, 9033, 4340, 1112, 5028, 4052, 5410, 9972, + 5272, 7759, 9481, 6252, 2832, 4357, 7820, 3555, 4324, + 20, 1504, 9542, 713, 1188, 1948, 3456, 1550, 136, + 2414, 5911, 6860, 4407, 6346, 7014, 5031, 6493, 2142, + 1672, 3712, 1898, 8437, 9729, 3991, 167, 4352, 5778, + 8900, 1295, 7690, 1238, 7305, 1352, 3474, 4757, 448, + 4751, 7038, 5655, 3913, 2112, 3311, 2122, 7321, 1658, + 9669, 1257, 6025, 4153, 3492, 4168, 843, 4347, 5234, + 4124, 3706, 6196, 9808, 5421, 5783, 5258, 2911, 5185, + 5334, 7727, 8197, 3498, 5897, 6029, 9707, 5802, 2571, + 7156, 7850, 7317, 249, 6660, 9594, 8499, 6553, 367, + 452, 2085, 3023, 7242, 2596, 6241, 3814, 4512, 3007, + 6430, 8255, 3146, 7416, 5434, 5823, 2994, 627, 262, + 5259, 1903, 4724, 1430, 729, 5769, 4005, 3066, 7263, + 5434, 2017, 9296, 466, 1984, 9099, 2145, 2980, 9331, + 7656, 3853, 4124, 4440, 4606, 4994, 3593, 5296, 6185, + 2913, 2613, 1513, 7347, 1102, 7495, 7027, 2473, 5744, + 394, 6818, 7551, 8928, 2745, 6186, 2232, 8871, 5872, + 2457, 5632, 8334, 406, 2307, 4965, 8901, 8286, 7300, + 1451, 785, 7023, 5972, 2973, 2951, 2860, 2555, 3468, + 5575, 2968, 6407, 4850, 4514, 934, 1371, 5509, 5185, + 6086, 2474, 120, 192, 3431, 9336, 8993, 2772, 1393, + 752, 6364, 7216, 2822, 8956, 1582, 2682, 3898, 4597, + 8811, 3172, 7041, 265, 5793, 7404, 4892, 9652, 5243, + 951, 6585, 8400, 7544, 5140, 5368, 7880, 3221, 1693, + 6758, 7073, 7494, 687, 4440, 7953, 9316, 1257, 3158, + 7707, 5424, 
2405, 5820, 1689, 6718, 6603, 4211, 9517, + 9615]), + values=tensor([0.4415, 0.6013, 0.5126, 0.9698, 0.1034, 0.0639, 0.4558, + 0.2905, 0.4310, 0.2619, 0.9778, 0.6823, 0.9070, 0.1229, + 0.8548, 0.3336, 0.0404, 0.9142, 0.8199, 0.0542, 0.7053, + 0.4688, 0.8485, 0.2928, 0.6756, 0.2645, 0.9947, 0.2741, + 0.5856, 0.1620, 0.4957, 0.3399, 0.5914, 0.2897, 0.1526, + 0.1693, 0.5980, 0.2886, 0.5541, 0.3334, 0.5449, 0.4062, + 0.5414, 0.6879, 0.2033, 0.0945, 0.6450, 0.7402, 0.2638, + 0.2943, 0.5515, 0.1512, 0.9724, 0.4802, 0.3050, 0.3013, + 0.9225, 0.9783, 0.5092, 0.7142, 0.1530, 0.1832, 0.4581, + 0.1377, 0.4616, 0.9132, 0.5714, 0.8993, 0.1126, 0.6871, + 0.5589, 0.3161, 0.4140, 0.9816, 0.6789, 0.6752, 0.7765, + 0.8042, 0.3426, 0.8369, 0.8937, 0.5076, 0.7416, 0.8606, + 0.2242, 0.5767, 0.8912, 0.6587, 0.9801, 0.3691, 0.7054, + 0.2320, 0.7870, 0.6422, 0.3693, 0.6632, 0.7760, 0.1448, + 0.4345, 0.0515, 0.3358, 0.9018, 0.5133, 0.2405, 0.4186, + 0.9543, 0.9360, 0.7747, 0.6893, 0.7048, 0.9897, 0.5550, + 0.9691, 0.4907, 0.4556, 0.6844, 0.3056, 0.0603, 0.0778, + 0.8454, 0.6882, 0.7264, 0.8823, 0.6269, 0.5507, 0.1535, + 0.3271, 0.4840, 0.1687, 0.2816, 0.3788, 0.0525, 0.8448, + 0.4092, 0.6448, 0.0704, 0.0081, 0.9023, 0.5796, 0.6260, + 0.4056, 0.6689, 0.4193, 0.6796, 0.5630, 0.9806, 0.2222, + 0.5558, 0.2750, 0.8217, 0.1518, 0.6088, 0.3728, 0.6107, + 0.7218, 0.7832, 0.0315, 0.1777, 0.4923, 0.6962, 0.5172, + 0.2135, 0.9904, 0.7255, 0.8147, 0.2772, 0.5359, 0.5445, + 0.6578, 0.1756, 0.7465, 0.9196, 0.8367, 0.8344, 0.1261, + 0.5996, 0.8036, 0.3773, 0.7719, 0.0903, 0.3028, 0.2725, + 0.9127, 0.9162, 0.5408, 0.6634, 0.6958, 0.5282, 0.5569, + 0.4442, 0.2693, 0.5361, 0.4599, 0.3625, 0.0550, 0.1984, + 0.5349, 0.2482, 0.6378, 0.0691, 0.8641, 0.3287, 0.3415, + 0.4697, 0.9585, 0.0278, 0.0861, 0.0753, 0.5865, 0.7297, + 0.7984, 0.7314, 0.7714, 0.3176, 0.0359, 0.8791, 0.6106, + 0.9543, 0.1131, 0.3027, 0.3898, 0.1352, 0.1799, 0.1900, + 0.9195, 0.4614, 0.3998, 0.1611, 0.2860, 0.0189, 0.1376, + 0.0642, 0.6253, 0.3117, 0.6206, 0.2776, 0.7335, 0.0215, + 0.1020, 0.0450, 0.8422, 0.1455, 0.6809, 0.3764, 0.1098, + 0.5745, 0.6518, 0.6141, 0.7556, 0.0799, 0.6027, 0.9244, + 0.2771, 0.9147, 0.3379, 0.8431, 0.7801, 0.1199, 0.8711, + 0.3760, 0.5248, 0.4262, 0.0309, 0.0502, 0.0138, 0.5777, + 0.5941, 0.7357, 0.9003, 0.1504, 0.5942, 0.0234, 0.9430, + 0.3015, 0.5012, 0.3805, 0.7971, 0.2703, 0.2556, 0.4135, + 0.9302, 0.7793, 0.8013, 0.2127, 0.1854, 0.8157, 0.9489, + 0.5990, 0.6312, 0.5622, 0.3686, 0.8562, 0.4716, 0.4828, + 0.4596, 0.1787, 0.8020, 0.5056, 0.5302, 0.5941, 0.1504, + 0.1235, 0.0377, 0.2361, 0.5933, 0.4742, 0.6778, 0.2338, + 0.1840, 0.7674, 0.9783, 0.7128, 0.0084, 0.4766, 0.8942, + 0.6541, 0.0450, 0.8813, 0.8581, 0.6083, 0.3958, 0.5902, + 0.6407, 0.3035, 0.5362, 0.6238, 0.0892, 0.0057, 0.4585, + 0.6267, 0.1433, 0.2999, 0.2599, 0.8013, 0.1761, 0.8615, + 0.3483, 0.3247, 0.1179, 0.2298, 0.6436, 0.1866, 0.2388, + 0.4454, 0.7344, 0.5541, 0.4582, 0.3439, 0.7475, 0.6069, + 0.9289, 0.9691, 0.3224, 0.0400, 0.6214, 0.2663, 0.8787, + 0.6913, 0.2276, 0.2654, 0.2824, 0.9542, 0.4508, 0.7718, + 0.5819, 0.7007, 0.7532, 0.2890, 0.5844, 0.4034, 0.4560, + 0.5721, 0.3861, 0.4393, 0.4255, 0.7961, 0.8238, 0.6457, + 0.5584, 0.9889, 0.2485, 0.1868, 0.6329, 0.4246, 0.0722, + 0.2295, 0.5590, 0.5341, 0.5840, 0.5867, 0.2096, 0.2893, + 0.6630, 0.5421, 0.6229, 0.5147, 0.3445, 0.0323, 0.1572, + 0.7699, 0.5255, 0.4444, 0.2885, 0.7488, 0.7088, 0.2912, + 0.7436, 0.7915, 0.0733, 0.9105, 0.6975, 0.6753, 0.9628, + 0.0220, 0.9450, 0.7152, 0.6484, 0.2016, 0.7266, 0.4621, + 
0.9244, 0.3208, 0.1924, 0.5143, 0.1364, 0.2001, 0.4960, + 0.0313, 0.3327, 0.3586, 0.1335, 0.6516, 0.0035, 0.3676, + 0.9186, 0.7489, 0.2595, 0.4232, 0.1239, 0.4474, 0.2603, + 0.1655, 0.0450, 0.7379, 0.4038, 0.2505, 0.5416, 0.6372, + 0.8776, 0.1829, 0.7521, 0.4974, 0.9526, 0.5187, 0.0162, + 0.0784, 0.6127, 0.7950, 0.8684, 0.1502, 0.2616, 0.0109, + 0.7618, 0.5504, 0.4625, 0.5130, 0.3349, 0.2075, 0.7484, + 0.2322, 0.9482, 0.0557, 0.2593, 0.8634, 0.2678, 0.7997, + 0.1470, 0.8854, 0.1792, 0.3971, 0.4395, 0.2147, 0.8617, + 0.7653, 0.9522, 0.6222, 0.1151, 0.7874, 0.6605, 0.9141, + 0.5394, 0.0543, 0.9805, 0.6427, 0.6026, 0.5450, 0.3987, + 0.2173, 0.7257, 0.9467, 0.3590, 0.8305, 0.9851, 0.7266, + 0.7210, 0.4619, 0.9947, 0.4160, 0.9266, 0.9716, 0.3095, + 0.6290, 0.7516, 0.0773, 0.3839, 0.5656, 0.5713, 0.8387, + 0.5951, 0.8347, 0.8002, 0.6353, 0.6676, 0.8665, 0.1704, + 0.1516, 0.2652, 0.7229, 0.2765, 0.1121, 0.9817, 0.8564, + 0.3469, 0.3504, 0.3072, 0.2632, 0.5895, 0.5174, 0.4026, + 0.1364, 0.5608, 0.7744, 0.7937, 0.8143, 0.5205, 0.4612, + 0.0113, 0.0541, 0.3102, 0.5930, 0.9411, 0.5533, 0.6919, + 0.6365, 0.8476, 0.0198, 0.0366, 0.9327, 0.7269, 0.3586, + 0.7745, 0.0834, 0.0832, 0.6023, 0.7883, 0.7545, 0.9670, + 0.4813, 0.6739, 0.0831, 0.5227, 0.7484, 0.4197, 0.4165, + 0.1262, 0.5130, 0.3426, 0.2503, 0.2406, 0.0887, 0.5780, + 0.6191, 0.0626, 0.5445, 0.1171, 0.4482, 0.9651, 0.6499, + 0.4185, 0.9964, 0.8638, 0.7514, 0.7064, 0.6449, 0.1740, + 0.2789, 0.9708, 0.0411, 0.0976, 0.3315, 0.1025, 0.3725, + 0.3011, 0.0041, 0.4219, 0.9494, 0.9327, 0.5777, 0.0742, + 0.3001, 0.5165, 0.7896, 0.2761, 0.7934, 0.8226, 0.0769, + 0.4508, 0.9088, 0.9363, 0.3806, 0.7336, 0.2494, 0.8425, + 0.1188, 0.3084, 0.8034, 0.6387, 0.5120, 0.0964, 0.4423, + 0.0326, 0.5969, 0.3875, 0.7015, 0.1614, 0.0456, 0.3080, + 0.4419, 0.5501, 0.4593, 0.6924, 0.3353, 0.4249, 0.6182, + 0.6731, 0.0665, 0.4382, 0.1282, 0.8806, 0.5417, 0.1511, + 0.8967, 0.1791, 0.7341, 0.7820, 0.3063, 0.1693, 0.2448, + 0.5324, 0.8764, 0.8032, 0.6217, 0.7666, 0.0290, 0.5756, + 0.5368, 0.6837, 0.8425, 0.9269, 0.8851, 0.7297, 0.4100, + 0.0830, 0.7274, 0.6707, 0.7886, 0.6064, 0.6168, 0.8794, + 0.2459, 0.2226, 0.7328, 0.0671, 0.9354, 0.1578, 0.9391, + 0.7607, 0.6549, 0.3751, 0.8607, 0.8558, 0.5714, 0.1929, + 0.4860, 0.1420, 0.1337, 0.1905, 0.9881, 0.1650, 0.2729, + 0.1906, 0.0516, 0.3917, 0.7651, 0.2976, 0.1600, 0.3865, + 0.8835, 0.2804, 0.2193, 0.9395, 0.5070, 0.3572, 0.0397, + 0.8047, 0.7010, 0.1034, 0.0082, 0.2831, 0.4944, 0.8787, + 0.4122, 0.0980, 0.0169, 0.0079, 0.0244, 0.7492, 0.9822, + 0.5642, 0.8464, 0.4997, 0.6173, 0.0653, 0.9125, 0.0781, + 0.4155, 0.9481, 0.6939, 0.2718, 0.4249, 0.8118, 0.3439, + 0.5777, 0.9966, 0.3292, 0.1202, 0.7200, 0.2519, 0.8671, + 0.0653, 0.3801, 0.1987, 0.0712, 0.4025, 0.2125, 0.8575, + 0.8190, 0.0269, 0.7551, 0.9860, 0.5051, 0.2462, 0.7202, + 0.9788, 0.6497, 0.9001, 0.7066, 0.2703, 0.2680, 0.1810, + 0.0814, 0.5232, 0.3867, 0.1370, 0.9077, 0.4343, 0.7542, + 0.7324, 0.8975, 0.6439, 0.4352, 0.7082, 0.8315, 0.9360, + 0.7744, 0.7737, 0.3185, 0.7683, 0.7204, 0.6797, 0.1381, + 0.0680, 0.8863, 0.4171, 0.9753, 0.7834, 0.6247, 0.4415, + 0.3685, 0.6455, 0.4171, 0.4496, 0.6336, 0.5676, 0.3524, + 0.0609, 0.3024, 0.8095, 0.3386, 0.4790, 0.6576, 0.2983, + 0.5918, 0.3457, 0.5714, 0.4836, 0.5379, 0.2760, 0.4745, + 0.8249, 0.5231, 0.2518, 0.1107, 0.7531, 0.9927, 0.6691, + 0.5479, 0.8661, 0.1062, 0.2639, 0.8719, 0.4872, 0.7824, + 0.4431, 0.6183, 0.3399, 0.5890, 0.0408, 0.2008, 0.6868, + 0.1677, 0.8079, 0.7914, 0.5275, 0.5099, 0.8609, 0.7195, + 0.7869, 0.2394, 
0.6218, 0.0175, 0.7397, 0.5786, 0.2749, + 0.5056, 0.3739, 0.1506, 0.3525, 0.9063, 0.1263, 0.7479, + 0.4609, 0.6911, 0.5454, 0.6017, 0.9923, 0.0775, 0.5711, + 0.0871, 0.9472, 0.5472, 0.4527, 0.7268, 0.4719, 0.0237, + 0.5548, 0.3452, 0.5147, 0.5543, 0.3881, 0.6377, 0.4363, + 0.8582, 0.1181, 0.4029, 0.1913, 0.7190, 0.0104, 0.9330, + 0.7107, 0.5703, 0.5688, 0.0687, 0.2854, 0.1080, 0.1319, + 0.6260, 0.3483, 0.0434, 0.6685, 0.9786, 0.1326, 0.9894, + 0.7212, 0.3212, 0.3659, 0.3632, 0.8856, 0.1125, 0.3702, + 0.4390, 0.3271, 0.9685, 0.8393, 0.6468, 0.3694, 0.8215, + 0.8256, 0.0243, 0.8467, 0.6851, 0.5169, 0.2961, 0.3100, + 0.4346, 0.9920, 0.8174, 0.4419, 0.9220, 0.7491, 0.9513, + 0.9558, 0.2557, 0.0819, 0.6161, 0.3992, 0.4513, 0.3794, + 0.2566, 0.9818, 0.1875, 0.6689, 0.9989, 0.6274, 0.1447, + 0.7124, 0.9271, 0.5618, 0.3250, 0.0051, 0.0734, 0.6607, + 0.4316, 0.8575, 0.9087, 0.3760, 0.5591, 0.5008, 0.1099, + 0.7937, 0.4907, 0.5126, 0.1552, 0.8388, 0.0713, 0.2170, + 0.5683, 0.3341, 0.5338, 0.8690, 0.0444, 0.5187, 0.8890, + 0.5402, 0.5834, 0.6082, 0.8602, 0.8437, 0.4723, 0.7593, + 0.8109, 0.2675, 0.3399, 0.6022, 0.0546, 0.7369, 0.0541, + 0.5651, 0.6738, 0.4614, 0.6944, 0.2561, 0.0901, 0.2038, + 0.5369, 0.1848, 0.5378, 0.5862, 0.0851, 0.9818]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.0651, 0.6329, 0.6141, ..., 0.3243, 0.1158, 0.5219]) +tensor([0.9110, 0.9462, 0.7927, ..., 0.0987, 0.6084, 0.0709]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1133,375 +540,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.808244943618774 seconds +Time: 9.372220277786255 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 141920 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.197850227355957} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7315, 1858, 1670, 5364, 1184, 3689, 9574, 1136, 5558, - 3491, 589, 9091, 159, 766, 502, 9929, 4846, 9650, - 2563, 3405, 2322, 3115, 8463, 8330, 9642, 7938, 1757, - 7149, 4012, 8129, 197, 2039, 5706, 3549, 7371, 2993, - 1668, 5510, 7702, 9196, 8429, 6070, 2662, 4013, 9415, - 6857, 7829, 189, 1980, 6763, 6718, 1267, 4257, 3542, - 1839, 9352, 3880, 4065, 5790, 6525, 9847, 6167, 4814, - 6341, 2068, 662, 5058, 1944, 658, 6063, 9056, 9925, - 2964, 8244, 282, 3473, 7406, 8810, 4236, 886, 9762, - 8425, 8800, 4778, 5281, 3283, 4118, 9078, 3169, 8457, - 9924, 2720, 1304, 4941, 3743, 4847, 8299, 4889, 214, - 6275, 5734, 2313, 2745, 5305, 3623, 13, 2937, 2995, - 6172, 9968, 1311, 5504, 8279, 7545, 3069, 7648, 5567, - 8268, 1055, 3660, 335, 7062, 8121, 8983, 7622, 6715, - 5283, 420, 1742, 3498, 1745, 3085, 6189, 2380, 1283, - 9795, 8106, 1945, 1678, 538, 5640, 7420, 194, 9011, - 3294, 2932, 2411, 2645, 8021, 5019, 4361, 3489, 1910, - 9620, 4481, 9616, 1602, 3695, 472, 2841, 8751, 2255, - 3889, 4605, 7873, 655, 6214, 3156, 256, 9963, 5581, - 8680, 717, 5063, 1404, 9442, 2228, 7492, 7160, 3856, - 6002, 9242, 9133, 2725, 317, 7326, 4324, 8318, 9240, - 9476, 4557, 5973, 4304, 8345, 1060, 987, 4937, 7346, - 7026, 4522, 1707, 1886, 1437, 9931, 6068, 6434, 2480, - 5992, 9055, 9136, 8322, 9905, 2782, 196, 693, 1688, - 8926, 3768, 8472, 207, 7495, 2196, 9726, 8814, 7453, - 1485, 5440, 3418, 8856, 743, 8967, 7230, 7965, 5970, - 15, 4482, 7568, 1808, 6735, 4024, 7824, 7515, 8144, - 3912, 8719, 4629, 8161, 3936, 4716, 2525, 783, 5222, - 9876, 8442, 8428, 7355, 3144, 4428, 6522, 132, 3790, - 5300, 9557, 7676, 9228, 8283, 5149, 2325, 9115, 1687, - 4272, 5763, 8908, 5886, 1669, 7453, 1796, 8835, 7278, - 6225, 9225, 6087, 903, 1728, 1580, 3725, 7730, 3836, - 8521, 8698, 4270, 4031, 2371, 8499, 521, 7387, 7743, - 8947, 886, 3479, 4527, 8226, 9051, 6009, 5379, 5768, - 439, 2600, 4801, 8794, 9610, 3561, 1577, 9097, 9975, - 1103, 6673, 9445, 1441, 7688, 2124, 2728, 4023, 4610, - 9118, 4314, 7265, 578, 8603, 197, 5840, 6120, 5539, - 9564, 8200, 5375, 7108, 5160, 476, 3824, 2379, 87, - 1194, 5282, 4975, 7693, 2868, 5155, 7376, 7424, 5181, - 4696, 4912, 8460, 4137, 3270, 9635, 9590, 6124, 1068, - 6534, 3173, 3896, 8453, 8316, 291, 1519, 8246, 4691, - 1751, 2308, 3396, 2423, 5215, 2970, 6752, 2916, 7921, - 8802, 4661, 9102, 3770, 6158, 7914, 1489, 4152, 1612, - 5823, 1936, 6852, 1976, 2146, 3405, 5451, 6120, 4413, - 8917, 3721, 1057, 4268, 9757, 761, 1140, 545, 6502, - 1749, 9777, 5713, 4612, 1378, 6061, 5135, 7937, 4403, - 5066, 6373, 9197, 7808, 4204, 644, 8672, 2203, 2426, - 896, 6965, 8169, 1801, 9071, 4317, 3597, 8597, 2047, - 1880, 5444, 8242, 6448, 7450, 4122, 6833, 5968, 2231, - 7351, 4384, 833, 7891, 8348, 765, 3059, 7905, 8630, - 7013, 9479, 6195, 7212, 3271, 1293, 3311, 3855, 1964, - 4641, 7448, 4226, 8995, 7274, 5191, 6567, 9407, 1526, - 9480, 8548, 5872, 8769, 2698, 7864, 8038, 56, 27, - 7763, 2188, 9491, 2878, 9318, 4526, 7193, 5303, 5124, - 9708, 2338, 8366, 9823, 9154, 6091, 9581, 2937, 8906, - 5848, 2687, 7340, 8708, 1010, 6399, 3045, 4615, 8826, - 3568, 7285, 6031, 6734, 6933, 6578, 5862, 9558, 6554, - 5089, 3113, 4603, 7009, 7343, 4267, 19, 9919, 9366, - 2933, 9750, 5505, 6090, 1142, 7058, 5172, 8599, 4136, - 5547, 157, 7341, 831, 3008, 927, 5993, 8607, 171, - 8862, 6810, 2856, 5723, 7936, 8843, 3881, 1692, 6638, - 2904, 7697, 5025, 5714, 7530, 9817, 833, 9236, 7396, - 
9254, 912, 7384, 3775, 1170, 3977, 7524, 4813, 9008, - 7620, 7005, 3844, 204, 3517, 5068, 7264, 4011, 9098, - 8578, 2006, 5773, 9227, 5847, 5858, 2895, 2892, 6076, - 8741, 3340, 3921, 2744, 56, 4494, 5968, 89, 4026, - 7255, 8544, 840, 5863, 6302, 414, 2922, 3740, 955, - 8218, 5362, 5266, 5026, 8483, 5082, 7417, 7810, 644, - 1190, 6997, 8587, 3940, 7419, 7679, 3419, 2050, 1375, - 547, 8244, 8381, 3833, 6045, 5426, 8943, 8778, 9260, - 207, 4853, 1852, 1589, 1656, 6333, 1916, 7763, 2953, - 9839, 3533, 7621, 5586, 4408, 8103, 2442, 8516, 9148, - 3506, 9770, 571, 3215, 8121, 6287, 599, 7936, 8894, - 9182, 1185, 7298, 5238, 2435, 606, 6619, 1717, 9123, - 3804, 6744, 1688, 8115, 3519, 8007, 9191, 5309, 7085, - 4342, 899, 8048, 923, 5544, 6203, 2126, 6246, 100, - 7840, 564, 2942, 3285, 2333, 5463, 7923, 2151, 8056, - 5768, 3098, 9031, 3120, 4324, 2617, 2542, 2600, 8238, - 4487, 7446, 3440, 5653, 3220, 3177, 9241, 8081, 5102, - 9646, 8885, 6699, 8756, 5519, 9452, 259, 2677, 8794, - 1460, 8411, 5716, 9458, 8155, 644, 8341, 1450, 5540, - 518, 3426, 4607, 7955, 4148, 7168, 7044, 6505, 1269, - 2346, 3090, 5416, 1512, 4209, 5899, 7202, 6013, 8728, - 3813, 6142, 82, 1211, 5513, 137, 7986, 7640, 888, - 9985, 5099, 3271, 6013, 3559, 7646, 1436, 3067, 5359, - 155, 1728, 9824, 1779, 491, 233, 3754, 1015, 9765, - 298, 9547, 5517, 4342, 1582, 1949, 2482, 7321, 4988, - 2411, 5519, 4219, 6830, 5793, 9214, 4820, 3125, 8890, - 466, 8599, 508, 8339, 851, 3777, 1734, 9674, 8942, - 1427, 9500, 6588, 1605, 2007, 4551, 8625, 2609, 8688, - 2327, 4631, 6062, 4470, 4072, 4614, 1416, 705, 8243, - 2076, 1478, 9585, 1950, 8913, 3473, 2513, 5641, 9515, - 7648, 6244, 9436, 7192, 4031, 1160, 9508, 6308, 1862, - 2152, 8421, 2090, 4057, 9596, 5353, 4301, 5906, 3596, - 9384, 3132, 7647, 7603, 1913, 5032, 9088, 8117, 9096, - 9354, 6494, 1087, 8680, 5407, 3221, 1552, 3920, 5378, - 593, 2113, 3230, 2673, 9118, 6677, 9772, 8594, 9788, - 9478, 4593, 1005, 3282, 8204, 3936, 3443, 4355, 7243, - 1487, 2444, 8503, 9638, 7593, 7545, 129, 8918, 133, - 421, 8917, 4830, 3557, 8606, 3066, 6016, 3080, 4558, - 3544, 5859, 5233, 2394, 2434, 6421, 5504, 8413, 9470, - 5364, 6135, 5632, 3434, 1674, 4946, 63, 7706, 4456, - 2533, 4194, 7919, 3231, 3967, 8180, 3312, 3496, 3610, - 5542, 2929, 7279, 7446, 892, 2424, 6796, 4557, 3766, - 7918, 6363, 3300, 1258, 5530, 8133, 8383, 8851, 5621, - 3759, 3195, 4313, 9020, 9093, 9589, 8220, 8240, 8511, - 5075, 193, 2620, 5513, 2863, 9274, 8566, 2237, 3704, - 640, 7685, 9689, 8300, 5197, 3833, 6912, 7500, 4822, - 3780, 3582, 1367, 3318, 8493, 5378, 9890, 7159, 2910, - 7302, 2202, 4682, 1371, 3870, 8991, 7769, 4570, 5127, - 1421, 4645, 6754, 8030, 2942, 1562, 421, 1755, 6122, - 877]), - values=tensor([4.5503e-01, 5.5236e-01, 7.4946e-01, 7.5932e-01, - 9.8952e-01, 3.1557e-01, 9.1443e-01, 1.4281e-01, - 4.6438e-01, 9.2065e-01, 9.5281e-01, 9.8289e-01, - 4.9044e-01, 6.6880e-01, 9.5273e-01, 8.2119e-01, - 7.2789e-01, 1.0208e-02, 8.4355e-01, 1.4450e-01, - 6.3411e-01, 1.6232e-01, 7.1788e-01, 9.8785e-01, - 4.0314e-01, 3.0270e-01, 4.8394e-02, 3.4055e-01, - 7.6103e-01, 4.4991e-01, 5.0957e-01, 4.1284e-01, - 4.0828e-01, 2.3054e-01, 5.1885e-01, 4.2268e-01, - 6.4354e-01, 3.8568e-01, 8.9900e-01, 8.8779e-01, - 9.9151e-01, 6.9879e-02, 3.0729e-01, 4.2140e-01, - 7.0114e-01, 6.7449e-01, 8.3811e-01, 8.3634e-02, - 8.5457e-01, 5.9364e-01, 5.6239e-01, 5.3730e-01, - 2.8704e-01, 3.3603e-01, 7.3587e-02, 6.1617e-01, - 7.7568e-01, 1.0129e-01, 4.9434e-01, 4.4292e-01, - 7.5922e-01, 9.8988e-01, 2.8604e-01, 8.3362e-01, - 6.9002e-02, 5.9701e-01, 
2.6577e-01, 3.9077e-01, - 8.7670e-02, 7.3042e-01, 1.5991e-01, 4.9806e-01, - 5.8198e-01, 8.9984e-01, 8.0160e-01, 3.3965e-01, - 3.7423e-02, 6.4093e-01, 2.2178e-01, 1.7177e-01, - 3.3041e-01, 4.0645e-01, 4.5012e-01, 5.2065e-01, - 7.1438e-01, 7.7746e-01, 6.9993e-01, 8.4247e-01, - 3.3459e-01, 2.4600e-01, 5.9346e-01, 7.0913e-01, - 2.5832e-01, 7.7024e-01, 2.6469e-01, 2.7663e-01, - 6.2902e-02, 9.4403e-01, 6.6746e-02, 3.0041e-01, - 9.3610e-01, 2.6778e-01, 3.7705e-01, 8.4692e-03, - 8.8453e-01, 2.0632e-01, 4.4988e-01, 1.6894e-01, - 1.2438e-02, 5.8381e-01, 7.1115e-01, 4.4251e-01, - 6.7595e-01, 6.3083e-02, 1.5459e-01, 7.7248e-01, - 1.6884e-01, 4.5854e-01, 9.8097e-03, 2.8779e-01, - 9.4221e-01, 1.7999e-01, 9.6697e-01, 2.1248e-01, - 3.7740e-01, 4.0096e-01, 5.1385e-01, 9.8130e-01, - 6.5753e-01, 6.0122e-02, 1.9068e-01, 3.6002e-01, - 9.0231e-01, 1.9831e-01, 8.9445e-01, 6.5688e-02, - 4.5744e-01, 2.4531e-01, 8.5433e-03, 4.4037e-01, - 8.6668e-01, 6.9154e-01, 9.6479e-01, 9.5443e-01, - 1.2512e-01, 1.3133e-01, 8.0802e-01, 7.8380e-01, - 1.7089e-01, 2.1042e-01, 7.9960e-01, 5.8428e-01, - 4.6539e-01, 8.5401e-01, 5.1112e-01, 4.7769e-01, - 5.4050e-01, 9.3609e-01, 2.8009e-01, 6.6015e-02, - 5.1141e-01, 7.8915e-01, 8.6880e-01, 6.1754e-01, - 9.4843e-01, 1.1650e-01, 1.5677e-01, 1.7600e-01, - 9.5041e-01, 2.2249e-01, 9.7408e-01, 3.4262e-01, - 6.7159e-01, 6.4447e-01, 8.9288e-01, 2.9729e-01, - 7.2744e-01, 6.1960e-01, 9.0982e-01, 9.4766e-01, - 9.2741e-01, 1.8050e-01, 7.5777e-01, 8.4737e-01, - 5.2974e-01, 1.3975e-02, 3.2218e-01, 7.5728e-01, - 3.0380e-01, 9.5599e-01, 7.4789e-01, 3.7504e-01, - 9.6520e-01, 7.6303e-01, 3.9696e-01, 1.0401e-01, - 4.0982e-01, 6.3316e-01, 9.2327e-02, 1.9405e-02, - 4.7989e-01, 9.4566e-01, 6.9287e-01, 1.9453e-01, - 2.1253e-01, 4.4926e-01, 5.8619e-02, 7.0350e-01, - 3.3919e-01, 7.5556e-01, 9.4463e-01, 3.1611e-01, - 4.7702e-01, 3.3843e-01, 7.1410e-01, 6.7562e-01, - 5.5025e-01, 8.2715e-01, 9.1478e-01, 4.7040e-01, - 6.8418e-01, 1.3297e-01, 1.3643e-02, 8.3963e-01, - 9.4038e-01, 3.8012e-01, 6.6480e-01, 4.3203e-01, - 1.1632e-01, 2.1166e-01, 1.0880e-01, 5.0712e-02, - 6.8131e-01, 1.0832e-01, 8.7495e-01, 9.2336e-01, - 4.1149e-01, 8.0381e-01, 6.7670e-01, 6.4943e-01, - 2.4712e-01, 6.0406e-01, 8.7765e-02, 8.2406e-01, - 9.0938e-01, 1.8784e-01, 5.9111e-01, 8.8462e-01, - 3.7481e-01, 9.9173e-01, 1.7508e-01, 9.1699e-01, - 7.0613e-03, 1.6038e-01, 3.2955e-01, 7.4587e-02, - 8.9881e-01, 8.5470e-01, 7.3684e-01, 6.1175e-01, - 3.0823e-02, 7.5970e-01, 1.9162e-01, 7.2898e-01, - 3.5928e-01, 5.5114e-01, 9.5006e-01, 2.0655e-01, - 3.8569e-01, 1.7515e-01, 8.3360e-01, 6.9865e-01, - 9.2797e-01, 6.5322e-02, 9.3365e-01, 8.5821e-01, - 6.8687e-01, 5.4796e-01, 7.4582e-01, 3.8030e-01, - 5.9283e-01, 1.5694e-02, 1.0339e-01, 1.3704e-01, - 1.5233e-01, 8.2752e-01, 6.5473e-01, 1.7110e-01, - 4.4237e-01, 8.6223e-01, 8.4772e-01, 3.8298e-01, - 9.6728e-01, 6.0198e-01, 7.9223e-01, 8.2339e-01, - 7.4017e-01, 3.3245e-01, 4.3901e-01, 8.2795e-01, - 4.8074e-01, 9.4428e-01, 8.4482e-01, 6.9114e-01, - 4.6154e-01, 5.3893e-01, 3.2994e-01, 1.1555e-01, - 5.1914e-01, 4.7741e-01, 4.9269e-02, 4.4184e-01, - 7.6289e-01, 4.9858e-01, 6.7587e-01, 1.5299e-01, - 8.6362e-01, 5.8084e-01, 4.7783e-01, 4.9700e-01, - 4.2540e-01, 1.0162e-01, 7.5272e-01, 1.8767e-01, - 9.9168e-01, 4.0597e-01, 9.5217e-01, 6.5442e-01, - 5.2756e-01, 4.4262e-01, 5.8423e-01, 6.3144e-01, - 2.4769e-01, 8.0175e-01, 6.4149e-01, 5.8141e-01, - 9.1916e-01, 5.2628e-01, 7.8845e-01, 9.0944e-01, - 6.8864e-01, 8.1878e-01, 7.2157e-01, 4.5334e-03, - 3.0805e-02, 9.5355e-01, 7.2954e-01, 8.3027e-01, - 4.9064e-01, 2.4970e-01, 
2.3865e-01, 2.4121e-01, - 1.1834e-01, 6.6075e-01, 6.5268e-01, 3.8776e-01, - 2.7396e-01, 2.4684e-01, 2.4196e-01, 7.7938e-01, - 1.4070e-01, 9.9002e-01, 1.5260e-01, 1.4167e-01, - 7.9548e-02, 7.9584e-01, 8.7139e-01, 2.7498e-01, - 5.3075e-01, 4.1591e-01, 3.0671e-01, 8.1041e-01, - 1.0275e-01, 5.7758e-01, 3.1594e-01, 4.8286e-01, - 1.9194e-01, 3.1282e-01, 7.1670e-01, 3.2051e-01, - 5.2841e-02, 1.4067e-01, 9.1088e-01, 6.3668e-01, - 7.9115e-01, 2.4379e-01, 1.5462e-01, 4.2311e-02, - 7.5394e-01, 3.3252e-01, 2.4049e-01, 9.8577e-02, - 4.3073e-01, 6.5555e-01, 5.7805e-01, 4.9998e-01, - 4.3272e-01, 7.7004e-01, 1.1628e-01, 4.2535e-02, - 6.9726e-02, 1.5280e-01, 3.4529e-01, 4.6590e-01, - 5.8611e-02, 1.2878e-01, 7.4846e-01, 9.6319e-01, - 6.8051e-01, 7.1943e-01, 3.5870e-01, 5.7262e-01, - 3.2620e-01, 2.8076e-01, 8.7010e-02, 2.6366e-01, - 6.3187e-01, 1.4391e-01, 2.4884e-01, 3.5726e-01, - 3.9396e-01, 6.6159e-02, 3.5599e-01, 7.5055e-03, - 4.3583e-01, 4.5588e-02, 5.9369e-01, 1.2518e-01, - 6.8146e-01, 2.5843e-01, 5.1872e-01, 2.2847e-02, - 6.7753e-01, 2.2119e-01, 7.0221e-01, 7.7854e-01, - 3.8901e-01, 4.8029e-01, 8.7054e-01, 9.2905e-01, - 9.2144e-01, 6.6123e-01, 5.6375e-01, 9.4639e-01, - 6.2517e-01, 1.1648e-01, 8.8688e-01, 1.4300e-01, - 4.5617e-01, 7.1688e-01, 6.7259e-01, 1.8324e-01, - 3.7338e-01, 3.3730e-01, 4.3106e-01, 6.3842e-01, - 9.3775e-01, 7.9491e-01, 3.1424e-01, 2.5864e-02, - 4.7776e-01, 5.9028e-01, 9.3268e-01, 6.6511e-02, - 9.6948e-01, 8.4139e-01, 8.5390e-02, 5.1637e-01, - 9.5302e-01, 5.1624e-01, 1.6779e-01, 9.5432e-01, - 5.5960e-01, 5.8557e-01, 5.4894e-01, 9.4951e-01, - 7.7562e-03, 5.5602e-01, 1.5450e-03, 7.9207e-01, - 1.2908e-01, 4.7199e-01, 8.2021e-01, 2.9632e-01, - 4.7542e-01, 8.7852e-01, 8.6382e-01, 4.6447e-01, - 2.8956e-01, 3.2381e-01, 5.1671e-01, 4.2590e-01, - 3.5379e-01, 8.3501e-01, 9.9986e-01, 3.4206e-01, - 9.3903e-01, 2.7332e-01, 1.1633e-01, 3.1837e-01, - 4.9135e-01, 4.8891e-02, 5.8062e-01, 6.1981e-01, - 2.2126e-01, 7.9590e-01, 5.5608e-02, 2.9418e-01, - 6.1269e-01, 1.9812e-01, 6.3875e-01, 9.0929e-01, - 4.0284e-01, 3.2515e-01, 1.6248e-01, 3.7453e-01, - 4.3202e-01, 2.2840e-01, 3.2111e-02, 4.3723e-01, - 9.3748e-01, 7.7104e-01, 5.2249e-01, 3.4964e-01, - 4.8542e-01, 4.1877e-03, 7.9705e-01, 9.7778e-02, - 7.5475e-01, 2.3129e-01, 3.2430e-01, 5.1361e-01, - 1.8085e-01, 2.4295e-01, 6.1813e-01, 5.6771e-02, - 8.8849e-01, 1.6365e-01, 3.5850e-01, 1.2620e-01, - 8.2022e-01, 8.4712e-01, 2.9485e-01, 2.3532e-01, - 4.3311e-01, 2.0140e-01, 8.9178e-02, 8.6381e-01, - 3.1238e-01, 7.5215e-01, 5.5682e-01, 7.5471e-01, - 6.9133e-01, 2.2126e-01, 3.9228e-01, 4.3043e-01, - 3.7625e-02, 5.6012e-01, 5.1315e-01, 4.1442e-01, - 5.9861e-01, 9.3922e-01, 9.5372e-01, 2.4100e-01, - 5.6126e-01, 5.9819e-01, 9.4024e-01, 3.3099e-01, - 9.4664e-01, 7.0020e-01, 9.6989e-01, 2.8081e-01, - 2.1666e-01, 6.3859e-01, 7.5331e-01, 7.9288e-01, - 6.5633e-01, 6.7730e-01, 5.3757e-01, 8.5381e-01, - 2.7089e-02, 8.9615e-01, 5.8202e-01, 1.4598e-01, - 9.2770e-01, 1.8855e-01, 6.7041e-01, 8.6578e-01, - 4.2881e-01, 7.9922e-01, 2.4112e-01, 4.0255e-02, - 8.6471e-01, 8.6429e-02, 7.2325e-02, 2.4991e-01, - 5.2937e-01, 2.8655e-01, 1.5239e-02, 1.8351e-01, - 5.4877e-01, 5.2668e-01, 1.9444e-01, 5.9319e-01, - 6.0559e-01, 7.6015e-01, 1.0539e-01, 3.3917e-01, - 2.1990e-01, 2.2539e-01, 4.5181e-01, 3.4579e-01, - 9.4857e-01, 6.1781e-01, 7.9405e-01, 7.7832e-01, - 2.5284e-01, 9.8874e-01, 5.6116e-02, 8.6901e-01, - 4.8905e-01, 8.0060e-01, 6.9558e-01, 7.3432e-01, - 7.0246e-01, 9.7403e-01, 9.1121e-01, 9.0764e-02, - 1.3506e-03, 1.8463e-01, 6.4957e-01, 1.7816e-01, - 5.2229e-02, 8.8521e-01, 
5.1552e-01, 7.3362e-01, - 3.7973e-01, 7.0862e-04, 7.8281e-01, 9.0549e-01, - 8.8415e-01, 6.1763e-01, 5.1671e-01, 7.1547e-01, - 5.0531e-01, 5.8740e-01, 9.1102e-01, 2.1919e-01, - 4.6771e-01, 3.6478e-01, 7.6398e-01, 9.1139e-01, - 9.4153e-01, 9.2249e-01, 2.2911e-01, 2.7885e-01, - 4.3889e-01, 1.4626e-01, 6.3367e-01, 3.9236e-01, - 9.8093e-01, 6.6512e-01, 9.1751e-01, 5.0031e-01, - 9.7081e-01, 8.7499e-01, 2.7757e-01, 8.8382e-01, - 2.9761e-01, 2.0972e-01, 3.4652e-01, 2.4990e-01, - 8.4359e-01, 5.6919e-02, 3.4057e-01, 3.8347e-01, - 1.6333e-01, 1.4571e-01, 1.0638e-01, 5.5718e-01, - 9.0663e-01, 4.6886e-01, 6.8587e-01, 7.3991e-01, - 7.9950e-01, 3.8327e-01, 1.5761e-01, 6.6038e-01, - 5.7231e-01, 6.3222e-01, 9.3553e-01, 4.0591e-01, - 4.2525e-01, 5.9767e-01, 2.2326e-01, 6.5176e-01, - 8.9346e-01, 4.4515e-01, 1.0019e-01, 3.7373e-01, - 2.4146e-01, 4.1742e-02, 6.5142e-01, 6.4001e-01, - 5.2382e-01, 7.2723e-01, 8.1684e-01, 5.1471e-01, - 4.1147e-01, 1.1515e-01, 4.6097e-01, 6.7416e-01, - 9.8748e-01, 9.0707e-01, 1.8568e-01, 1.1411e-01, - 5.2570e-01, 2.7178e-01, 7.4673e-01, 2.5094e-01, - 8.4486e-01, 5.5279e-01, 8.5037e-01, 2.4211e-01, - 2.3279e-02, 7.4591e-01, 7.8880e-01, 4.9236e-01, - 1.5544e-01, 5.8410e-01, 7.0916e-01, 2.1707e-01, - 5.3791e-01, 1.3857e-01, 2.8669e-01, 6.3323e-01, - 6.9263e-01, 3.5406e-01, 5.5079e-01, 6.6306e-01, - 2.7546e-01, 9.6607e-01, 5.8201e-01, 5.4173e-01, - 3.2975e-01, 4.5753e-01, 6.9765e-01, 3.3836e-01, - 9.7130e-01, 5.7396e-01, 5.9263e-01, 9.6322e-01, - 2.0223e-01, 1.7166e-01, 9.5551e-01, 2.1967e-01, - 4.6969e-01, 5.8877e-01, 7.4059e-01, 4.7956e-01, - 5.1271e-01, 2.2219e-01, 1.5212e-01, 9.8482e-01, - 4.7918e-01, 9.7424e-01, 6.1901e-01, 5.8213e-01, - 9.8916e-01, 8.7395e-01, 1.1881e-01, 1.4496e-01, - 7.9770e-01, 8.8556e-01, 6.3267e-01, 1.3114e-01, - 2.4220e-01, 9.3859e-01, 4.8235e-01, 2.6558e-01, - 7.2715e-01, 3.5161e-01, 7.2675e-01, 3.3797e-01, - 3.1692e-01, 8.7654e-01, 9.2532e-01, 3.3535e-01, - 3.8120e-01, 7.8279e-02, 6.3613e-01, 1.8321e-02, - 8.3910e-01, 8.3030e-01, 2.7475e-01, 5.6181e-01, - 7.5287e-01, 9.3822e-01, 1.3700e-01, 1.0742e-01, - 2.0008e-01, 5.8382e-01, 9.8849e-01, 1.6606e-01, - 7.1002e-01, 6.1730e-01, 3.4926e-01, 2.1845e-01, - 1.6840e-01, 9.6172e-01, 4.0727e-01, 3.6296e-02, - 9.1858e-01, 2.5370e-02, 7.0144e-01, 7.1196e-01, - 7.8953e-01, 1.2248e-02, 7.7101e-01, 2.2163e-01, - 2.7998e-01, 5.7485e-01, 5.9607e-01, 3.8870e-01, - 6.0031e-01, 8.3227e-01, 6.9437e-02, 5.5109e-01, - 4.4409e-01, 7.9516e-01, 2.8411e-01, 7.2349e-01, - 9.4898e-01, 7.4032e-01, 6.0055e-02, 2.9516e-01, - 1.5857e-01, 6.6885e-01, 2.2852e-01, 6.8544e-01, - 1.1209e-01, 7.6688e-01, 3.3589e-01, 5.7460e-01, - 5.0186e-01, 5.7458e-01, 2.7537e-01, 2.3220e-01, - 9.9519e-01, 3.4429e-01, 1.5667e-01, 1.6299e-01, - 8.1033e-01, 9.3481e-01, 6.2915e-01, 5.8154e-01, - 2.0061e-01, 2.2781e-01, 9.0755e-01, 4.0108e-01, - 3.4687e-01, 7.8445e-01, 4.8250e-02, 7.3426e-01, - 7.6138e-01, 4.4409e-01, 8.3863e-01, 7.6788e-01, - 4.2313e-01, 5.3538e-01, 1.2980e-01, 8.6170e-01, - 3.1408e-01, 5.4734e-01, 1.7495e-01, 1.1843e-01, - 3.7798e-01, 7.5427e-01, 3.1385e-01, 8.9977e-01, - 4.5864e-01, 2.3028e-01, 2.2634e-01, 9.9183e-01, - 2.2514e-01, 4.1228e-01, 5.1544e-01, 4.3390e-01, - 4.0426e-01, 3.9971e-01, 7.6463e-01, 2.0155e-03, - 7.5372e-01, 2.2893e-01, 2.9849e-02, 7.6110e-02, - 3.3857e-01, 2.2407e-02, 7.0282e-01, 1.4713e-01, - 6.2629e-01, 6.8243e-01, 9.9627e-01, 5.1013e-01, - 8.0536e-02, 9.8552e-01, 6.6502e-01, 6.8200e-02, - 8.5999e-02, 5.8414e-01, 8.9296e-01, 3.6543e-01, - 6.5870e-01, 7.9749e-01, 1.3448e-01, 7.5013e-01, - 2.0267e-01, 4.7561e-01, 
3.3676e-01, 8.5537e-01, - 2.8565e-01, 3.5750e-01, 6.0110e-01, 4.8385e-01, - 6.7175e-01, 2.8373e-02, 2.9667e-01, 1.5980e-01, - 6.0663e-01, 5.6893e-01, 8.6187e-01, 3.8426e-01, - 9.4313e-01, 1.5700e-01, 6.4374e-01, 6.7882e-01, - 8.1698e-02, 8.7238e-01, 6.1915e-01, 2.4451e-01, - 8.6242e-01, 6.4213e-01, 9.1978e-01, 5.7440e-01, - 6.5066e-02, 1.3295e-01, 6.8781e-01, 6.0675e-01, - 2.5313e-01, 2.2555e-02, 1.8691e-01, 8.1480e-01, - 8.6104e-01, 2.0666e-01, 2.1914e-01, 6.0359e-01, - 5.1611e-01, 5.6270e-01, 2.2858e-01, 7.0563e-02, - 7.3900e-02, 9.0084e-01, 4.8907e-01, 2.6475e-01, - 7.2082e-01, 6.8751e-01, 1.3829e-01, 9.7521e-02, - 9.9126e-01, 6.0189e-01, 7.9516e-01, 2.1625e-01, - 5.7626e-01, 1.6852e-01, 9.5043e-01, 3.1655e-01, - 2.5323e-01, 5.6154e-01, 5.6857e-01, 5.2077e-01, - 9.3389e-01, 8.0069e-01, 1.4968e-01, 8.4212e-01, - 3.1001e-01, 2.2924e-01, 6.2681e-01, 2.6673e-01, - 9.8161e-01, 4.5832e-01, 3.3629e-01, 6.6850e-01, - 7.7463e-01, 6.5327e-02, 8.0080e-01, 4.1266e-01, - 7.2843e-01, 9.0381e-02, 5.7561e-01, 7.3344e-01]), + col_indices=tensor([7056, 919, 9795, 9151, 2044, 7967, 5705, 2623, 1627, + 6717, 8708, 775, 127, 1374, 5044, 4299, 6342, 2263, + 5929, 5936, 1548, 2847, 6130, 6554, 2239, 7163, 1692, + 5793, 7119, 1287, 7508, 865, 1459, 7418, 3194, 4266, + 5780, 5575, 180, 8863, 8594, 4896, 438, 2537, 9988, + 4607, 5188, 6211, 6192, 6056, 7097, 5429, 1839, 2821, + 5784, 5246, 8081, 948, 4779, 1850, 1043, 9101, 2658, + 6891, 8025, 4761, 559, 865, 7629, 6085, 5946, 6354, + 9409, 9347, 7997, 4210, 3579, 999, 6644, 8129, 3149, + 6858, 4041, 4647, 7223, 2236, 7192, 9546, 9793, 3327, + 3171, 2565, 5976, 7978, 3677, 2920, 144, 9344, 6975, + 2500, 5379, 6794, 7366, 5322, 1940, 4044, 8778, 8972, + 3256, 9932, 4555, 9183, 8216, 4060, 4031, 360, 1944, + 7355, 8202, 9688, 593, 6710, 7830, 6044, 3654, 5591, + 7924, 1626, 5386, 2008, 4406, 634, 5326, 8955, 8209, + 5178, 2353, 3613, 9298, 6923, 1330, 2295, 8418, 4016, + 6041, 3493, 2023, 3116, 9349, 4002, 5551, 6028, 3519, + 6478, 8114, 4566, 1480, 1179, 4402, 6390, 2641, 4788, + 5490, 7082, 2181, 7781, 5904, 491, 2224, 8974, 482, + 7270, 9130, 6779, 3453, 7281, 6000, 9094, 1080, 96, + 3235, 2362, 7656, 6780, 2699, 3796, 1691, 106, 5591, + 2247, 1360, 4408, 8461, 5554, 962, 5012, 1178, 665, + 8966, 149, 3510, 466, 7726, 3308, 1620, 3276, 3031, + 3091, 7257, 8304, 7811, 2312, 9044, 4159, 2417, 6077, + 5309, 1633, 575, 3648, 8105, 8542, 7301, 3395, 9379, + 2616, 5588, 3562, 5579, 6202, 1237, 1076, 4693, 5863, + 4521, 8863, 2402, 4796, 8453, 8138, 8938, 9681, 8822, + 3016, 4280, 6174, 1987, 2265, 9622, 8422, 8024, 2552, + 7142, 6196, 3924, 6374, 3611, 5004, 6476, 3085, 3871, + 7485, 3089, 5859, 3123, 8069, 3938, 8070, 3319, 4967, + 6032, 3132, 8882, 8168, 9948, 9153, 5960, 4617, 2269, + 9592, 3316, 8405, 7722, 5455, 3568, 9972, 3178, 7732, + 3073, 9461, 3455, 1563, 3313, 5518, 1888, 5013, 9499, + 7686, 8128, 3614, 58, 6450, 6598, 9327, 2675, 6405, + 1465, 5960, 4, 919, 8839, 74, 8303, 7367, 985, + 1714, 972, 3691, 196, 2221, 5916, 9621, 719, 7868, + 6821, 1399, 599, 7980, 2535, 2710, 8687, 3504, 500, + 8756, 2206, 3927, 9619, 5296, 3265, 3270, 8220, 9811, + 8515, 3994, 4320, 3608, 6591, 3810, 1151, 5003, 1681, + 9535, 5842, 20, 673, 3013, 3128, 9579, 416, 4669, + 1672, 5675, 7857, 9285, 3183, 9934, 3384, 3124, 6569, + 9838, 4601, 323, 9366, 3658, 3481, 8266, 106, 4724, + 9925, 449, 231, 1193, 8640, 6487, 4395, 7873, 2975, + 371, 4139, 2769, 51, 7434, 3992, 2930, 3651, 8635, + 2260, 5356, 167, 5798, 384, 6039, 7889, 8913, 9072, + 4191, 8932, 4779, 1901, 9554, 5285, 
2097, 2772, 9175, + 6725, 2156, 148, 5, 476, 3156, 6607, 7786, 8502, + 1087, 2367, 867, 1415, 3356, 4022, 2905, 4640, 6433, + 1986, 3092, 5931, 6521, 8577, 8053, 2869, 6427, 3755, + 6368, 9012, 2562, 8601, 9397, 1396, 3615, 1144, 9404, + 4843, 3491, 1114, 7468, 9175, 5343, 2695, 6775, 3132, + 763, 2580, 5472, 6555, 5197, 7683, 7083, 9461, 6717, + 3416, 4535, 7651, 8103, 5254, 5119, 4843, 7481, 9740, + 875, 8435, 1895, 4149, 1288, 2687, 4563, 8844, 5497, + 6857, 8265, 5805, 7507, 6505, 6546, 761, 9841, 4528, + 9297, 3090, 2401, 1436, 8521, 3201, 3280, 647, 6053, + 9778, 5421, 2338, 7895, 9657, 4768, 4164, 346, 6739, + 5802, 3214, 837, 1345, 161, 7417, 6331, 4380, 8050, + 5202, 8517, 4660, 8461, 3874, 4103, 9836, 4667, 1660, + 1906, 1376, 4340, 1934, 9527, 6941, 331, 4783, 1013, + 8971, 6772, 975, 1578, 3459, 408, 1053, 7816, 3059, + 6639, 3329, 9285, 4169, 206, 4382, 9903, 2838, 624, + 6682, 7989, 5248, 6652, 4925, 1208, 2515, 3523, 7868, + 5124, 932, 3036, 2610, 9397, 6019, 4365, 3159, 7967, + 6712, 4292, 9634, 2506, 2306, 6880, 1422, 5592, 3014, + 2127, 2108, 7692, 2917, 5954, 9346, 9190, 5918, 9056, + 1095, 6544, 2601, 8317, 1507, 8478, 427, 8983, 7403, + 1403, 9463, 9521, 1040, 2364, 4790, 3646, 1839, 5831, + 2407, 5569, 8636, 3161, 4775, 5244, 6918, 2550, 2648, + 9459, 505, 4866, 459, 3616, 1899, 4865, 6781, 645, + 8187, 4087, 8269, 527, 142, 3293, 2115, 9227, 4492, + 4310, 5673, 305, 9750, 3677, 3525, 7465, 7997, 3251, + 3987, 3609, 7768, 9505, 2837, 6841, 9179, 2234, 5163, + 4220, 1985, 3135, 228, 4791, 5638, 6344, 6055, 1786, + 6558, 448, 7423, 7966, 6313, 1661, 2565, 5057, 6400, + 9258, 4995, 104, 6131, 2357, 478, 9553, 4258, 9824, + 7655, 1162, 7233, 6219, 4197, 5671, 698, 3933, 5858, + 5825, 1485, 2940, 1221, 3277, 8498, 517, 4954, 4571, + 3551, 3811, 2162, 1085, 4990, 2770, 5124, 3331, 2642, + 4071, 9626, 4506, 3366, 3577, 4151, 3800, 2366, 4168, + 3825, 9342, 9534, 8100, 1320, 5092, 3157, 2094, 6070, + 8057, 5993, 2362, 411, 8733, 397, 7089, 7206, 2945, + 3064, 5117, 3443, 470, 608, 3384, 8567, 5110, 4565, + 6431, 5474, 8783, 7927, 5169, 4863, 7172, 8183, 1224, + 9394, 692, 7991, 536, 2135, 5294, 8037, 8526, 1190, + 63, 849, 9116, 6147, 7935, 7112, 6626, 226, 5579, + 9554, 4960, 2909, 3565, 5132, 4287, 1865, 5728, 7111, + 4986, 5695, 7077, 5616, 8771, 9145, 8441, 9676, 1058, + 6430, 6079, 4862, 6083, 1292, 6175, 658, 1311, 8075, + 3408, 4717, 4970, 3386, 6451, 4509, 1589, 2526, 3046, + 4015, 115, 606, 6816, 9023, 74, 3707, 8570, 3733, + 7439, 8010, 2330, 2707, 7997, 6578, 6530, 9528, 1545, + 1828, 6104, 2474, 5308, 5222, 6365, 8862, 4445, 3256, + 8596, 7733, 4546, 4298, 3962, 1735, 405, 9446, 2765, + 5223, 1245, 1928, 1613, 1076, 3386, 5499, 7117, 7236, + 1089, 5676, 7344, 5770, 9734, 6262, 6225, 7764, 2961, + 5070, 7253, 556, 1687, 3579, 9804, 7261, 2963, 8905, + 7732, 2949, 3331, 8106, 4114, 6793, 2309, 6606, 2474, + 1922, 3120, 8805, 7588, 3625, 3473, 6466, 7065, 4574, + 1985, 4989, 8004, 6734, 5981, 3660, 7778, 125, 7384, + 6141, 1158, 4521, 6919, 144, 914, 7797, 4111, 6086, + 6673, 8442, 7879, 7713, 9248, 4408, 1097, 7867, 1626, + 2747, 651, 2532, 5546, 1372, 589, 1653, 7492, 6806, + 1244, 9203, 2206, 5006, 203, 739, 5153, 6454, 4270, + 4396, 3640, 2150, 9255, 9834, 7708, 1267, 1636, 5030, + 795, 8742, 7543, 566, 6397, 3605, 8166, 8086, 4077, + 8364, 3818, 1834, 6488, 7143, 7438, 5866, 8540, 856, + 7785, 7368, 3461, 1705, 3376, 6688, 1751, 642, 9424, + 2608, 4435, 1836, 3463, 6828, 5152, 1957, 8354, 4656, + 6595, 4201, 9042, 3819, 3824, 5439, 7490, 1010, 4810, + 9972, 
7566, 955, 751, 8629, 3365, 7594, 1053, 2299, + 7870]), + values=tensor([5.2060e-01, 5.8752e-01, 7.2713e-01, 1.0305e-01, + 9.3686e-01, 4.5302e-01, 7.9066e-01, 3.3889e-01, + 9.1304e-01, 7.1777e-01, 8.2105e-01, 1.2503e-01, + 3.7067e-02, 6.7818e-01, 7.9631e-01, 1.5332e-01, + 4.2909e-02, 3.6354e-01, 7.8473e-02, 3.9129e-01, + 5.0385e-01, 2.7311e-01, 2.8426e-01, 9.8490e-01, + 4.6471e-01, 1.8436e-01, 2.9210e-01, 7.6968e-01, + 9.8680e-01, 6.5393e-01, 6.5447e-01, 7.6587e-01, + 5.1696e-01, 3.0301e-01, 1.2974e-01, 4.9850e-01, + 9.5414e-01, 3.3815e-01, 1.2515e-01, 6.9570e-01, + 2.4680e-01, 5.9705e-01, 6.2698e-01, 7.7985e-01, + 6.0484e-01, 4.0524e-01, 6.3660e-01, 3.4506e-01, + 8.0482e-01, 7.8297e-02, 6.1842e-01, 5.7868e-01, + 6.3483e-01, 6.6108e-01, 9.3584e-04, 7.4347e-01, + 6.9134e-01, 8.5039e-01, 7.7178e-01, 3.8597e-01, + 1.9561e-01, 2.9964e-01, 5.3888e-01, 7.9254e-01, + 8.6409e-01, 7.5097e-01, 3.3689e-01, 7.8610e-02, + 4.5323e-01, 8.5126e-01, 6.9200e-01, 1.9498e-02, + 8.3003e-01, 8.6298e-01, 4.3806e-01, 7.1679e-01, + 8.1194e-01, 9.9720e-01, 8.2151e-01, 9.6087e-01, + 6.3853e-01, 3.4560e-01, 5.5284e-01, 4.4640e-01, + 3.7580e-01, 3.2438e-01, 8.9706e-01, 2.8604e-01, + 2.9762e-01, 2.8814e-01, 6.0684e-01, 4.1394e-01, + 6.2215e-01, 9.4551e-01, 4.0044e-01, 8.6963e-01, + 5.5266e-01, 5.9098e-01, 5.9783e-01, 1.7437e-01, + 3.4636e-01, 2.4680e-01, 2.3300e-01, 1.5789e-01, + 7.2572e-01, 4.7658e-01, 7.9251e-01, 5.5119e-01, + 3.4901e-01, 1.8926e-01, 7.2681e-02, 4.1166e-01, + 2.3574e-01, 9.2902e-01, 4.2647e-01, 4.2048e-01, + 7.6334e-01, 2.7562e-01, 1.2457e-01, 8.0447e-01, + 2.7298e-01, 2.6366e-01, 9.0147e-01, 9.8730e-01, + 4.8371e-01, 4.3424e-01, 8.9082e-01, 9.3669e-02, + 2.4573e-01, 1.7890e-01, 7.0955e-01, 7.5968e-01, + 4.4304e-01, 7.1779e-01, 7.2909e-01, 7.9905e-01, + 2.0894e-01, 1.6586e-01, 7.5174e-02, 4.0334e-01, + 1.0341e-01, 7.8773e-01, 8.5689e-01, 8.9735e-01, + 1.9547e-01, 8.9818e-01, 8.1080e-01, 5.8252e-01, + 6.2795e-01, 2.4733e-02, 3.7285e-01, 7.4750e-01, + 4.5329e-01, 5.4283e-01, 1.3581e-01, 8.2036e-01, + 3.3337e-02, 6.6711e-01, 2.7268e-01, 5.5912e-01, + 5.5669e-01, 3.5501e-01, 6.2792e-01, 9.7736e-01, + 3.5615e-01, 8.9429e-03, 5.2671e-01, 1.1444e-01, + 7.5357e-02, 8.8607e-01, 8.5215e-01, 8.6159e-01, + 3.1847e-01, 1.4552e-01, 6.4152e-01, 7.1863e-01, + 3.3005e-01, 3.2445e-02, 1.0168e-01, 8.4233e-01, + 8.8000e-01, 2.7047e-01, 7.1423e-01, 7.8187e-01, + 8.9146e-02, 4.9672e-01, 5.9046e-01, 1.9515e-01, + 9.8528e-01, 5.0321e-01, 5.8000e-01, 7.4168e-01, + 3.5260e-02, 1.2683e-01, 4.5738e-01, 6.8617e-01, + 4.4433e-01, 4.8514e-01, 5.5207e-01, 3.5556e-01, + 8.9692e-01, 3.6084e-01, 3.8720e-01, 5.8634e-01, + 7.0358e-01, 3.0095e-01, 1.3903e-01, 1.1694e-01, + 9.7711e-01, 3.7841e-01, 2.3352e-01, 4.5329e-01, + 8.2085e-01, 3.0580e-01, 4.8927e-01, 3.0347e-01, + 5.1485e-02, 5.2444e-02, 3.0794e-01, 5.9206e-01, + 8.3589e-01, 1.7323e-01, 3.1820e-01, 7.0798e-01, + 1.5440e-01, 1.9361e-01, 1.0835e-01, 8.1042e-01, + 7.1437e-02, 4.4926e-01, 8.1084e-01, 6.5975e-01, + 2.3167e-01, 9.5539e-01, 9.0418e-01, 1.8474e-01, + 8.4342e-01, 2.5289e-01, 6.3702e-01, 3.5397e-01, + 1.1201e-01, 4.5470e-01, 1.3388e-01, 2.6199e-01, + 8.1948e-01, 6.9816e-01, 9.3318e-01, 6.6708e-02, + 2.6242e-01, 4.7858e-02, 6.7194e-01, 4.8540e-01, + 5.6054e-01, 6.7286e-01, 8.0291e-01, 7.4681e-02, + 5.2165e-01, 3.6455e-01, 2.6177e-01, 2.8656e-01, + 4.8881e-01, 1.5495e-01, 8.1151e-01, 6.4157e-01, + 4.6118e-01, 9.8777e-02, 3.7165e-01, 7.8764e-01, + 6.8295e-01, 2.4347e-01, 1.6075e-01, 8.7591e-01, + 3.4120e-01, 5.4312e-01, 2.2322e-01, 7.3732e-01, + 9.2524e-02, 8.0737e-01, 
4.2221e-01, 6.0497e-02, + 2.2442e-01, 1.6638e-01, 9.3025e-01, 3.6644e-01, + 9.7051e-01, 2.6113e-01, 3.3298e-01, 5.5566e-01, + 8.4633e-01, 2.1670e-02, 3.4866e-01, 4.1243e-01, + 4.3974e-02, 5.1011e-02, 9.7001e-01, 9.2475e-01, + 6.8789e-01, 4.8683e-01, 8.8514e-01, 2.8747e-01, + 8.3000e-01, 5.9514e-01, 1.9359e-01, 9.9127e-01, + 4.6767e-01, 9.0117e-02, 3.1439e-01, 7.7717e-01, + 3.2578e-01, 7.6461e-01, 4.9560e-01, 7.0813e-01, + 8.9512e-01, 3.4294e-01, 7.9012e-01, 2.1834e-01, + 9.9954e-01, 8.4021e-01, 5.2415e-01, 5.5913e-01, + 6.0001e-01, 4.0800e-01, 9.2418e-01, 9.8484e-02, + 1.2325e-01, 9.2593e-01, 8.8126e-01, 2.1903e-01, + 8.6474e-02, 5.6159e-01, 1.6813e-01, 3.7588e-01, + 1.1257e-01, 7.7701e-01, 6.1191e-01, 9.3067e-01, + 1.7717e-01, 6.9689e-01, 4.1894e-01, 3.1744e-01, + 6.8942e-01, 5.4469e-01, 6.9142e-01, 5.9683e-01, + 4.4063e-01, 7.9936e-01, 7.7242e-01, 2.8988e-01, + 9.4011e-01, 6.2090e-01, 4.0644e-01, 4.6521e-01, + 6.9337e-01, 9.9796e-02, 8.2484e-01, 6.9345e-01, + 6.1383e-01, 7.4834e-01, 4.0722e-01, 9.9024e-01, + 8.5334e-01, 8.8771e-01, 5.0253e-01, 7.2537e-01, + 6.4116e-01, 4.1841e-01, 9.1987e-01, 6.3601e-02, + 6.2845e-01, 9.8979e-01, 3.2048e-02, 4.9481e-01, + 7.6203e-01, 8.4482e-01, 1.8315e-01, 9.7150e-01, + 6.4234e-01, 3.9806e-01, 3.1906e-01, 8.5592e-01, + 2.7348e-01, 1.0324e-01, 3.3822e-01, 1.0261e-01, + 7.8657e-01, 4.1245e-02, 6.2280e-01, 1.5594e-01, + 1.4057e-01, 9.3217e-01, 5.3651e-01, 3.1414e-01, + 7.4943e-01, 1.8062e-01, 6.8355e-01, 1.7011e-01, + 7.9311e-01, 5.6505e-01, 1.7516e-01, 3.2628e-01, + 7.4869e-01, 3.6292e-02, 7.7730e-01, 5.6876e-01, + 7.4365e-01, 3.1819e-01, 2.9167e-01, 8.0054e-01, + 5.2095e-02, 8.4271e-01, 9.4201e-01, 2.6332e-01, + 4.1440e-01, 2.8004e-01, 6.3749e-01, 8.2784e-01, + 3.8989e-01, 4.8344e-01, 6.1792e-01, 7.7696e-01, + 2.9025e-01, 3.0195e-01, 1.6203e-02, 4.9882e-02, + 9.5260e-01, 3.8574e-02, 8.6137e-01, 1.0206e-01, + 1.8185e-01, 3.7184e-01, 1.8011e-01, 5.8471e-01, + 9.0896e-01, 7.8819e-01, 1.2554e-01, 9.1937e-01, + 4.7326e-01, 8.1565e-01, 4.0860e-01, 9.5624e-01, + 6.4546e-01, 2.1755e-01, 1.8583e-01, 7.2313e-01, + 5.0034e-01, 9.6810e-01, 1.1208e-01, 3.3256e-01, + 1.0626e-01, 8.2704e-01, 9.0571e-01, 9.1930e-01, + 5.0810e-01, 2.9139e-02, 4.8153e-01, 8.3425e-01, + 7.7208e-01, 2.0930e-01, 7.7873e-01, 1.9398e-01, + 4.6583e-01, 7.2143e-01, 7.7072e-01, 8.8225e-01, + 5.6393e-01, 4.5270e-01, 4.5709e-01, 3.4438e-02, + 8.3723e-01, 8.7398e-01, 7.7789e-01, 3.1834e-01, + 6.7630e-01, 7.2696e-01, 6.2046e-01, 8.7032e-01, + 7.5275e-01, 1.2652e-01, 1.6913e-01, 8.9638e-01, + 8.4284e-01, 6.7028e-01, 1.6150e-01, 4.8858e-01, + 5.2657e-01, 2.6709e-01, 5.3737e-01, 7.9635e-02, + 4.6502e-01, 6.6489e-01, 3.3302e-02, 6.7807e-03, + 3.2601e-01, 6.2838e-01, 8.8805e-01, 6.4081e-01, + 8.3664e-01, 2.1253e-01, 9.9560e-01, 1.0008e-01, + 8.1128e-01, 7.1759e-02, 1.9140e-01, 3.9544e-01, + 2.0466e-01, 1.0560e-01, 5.0978e-01, 4.3684e-01, + 1.8097e-02, 5.2480e-01, 1.3644e-01, 6.5843e-01, + 4.7603e-01, 3.9206e-01, 2.5961e-01, 4.4991e-01, + 2.7612e-01, 5.3666e-01, 1.0518e-02, 8.5692e-01, + 2.4882e-01, 9.3487e-01, 5.1282e-01, 9.4781e-01, + 6.6273e-01, 7.0853e-01, 2.2742e-01, 9.1791e-01, + 6.9906e-01, 3.2979e-01, 5.6041e-01, 8.6281e-01, + 9.5303e-01, 7.5565e-01, 4.3419e-01, 8.3829e-01, + 2.4070e-01, 6.8860e-01, 5.5668e-01, 1.8449e-01, + 9.9667e-01, 7.7594e-01, 5.3148e-01, 4.1552e-01, + 5.1947e-01, 1.3992e-01, 4.4856e-01, 7.4868e-01, + 6.3705e-01, 3.5783e-01, 1.2134e-01, 6.9568e-01, + 1.2509e-01, 1.7793e-01, 3.2242e-02, 4.1301e-01, + 5.9646e-01, 1.6615e-01, 3.1664e-01, 8.1783e-02, + 2.9824e-02, 2.1237e-01, 
5.2911e-01, 5.1732e-01, + 1.9451e-01, 3.5512e-01, 2.9795e-01, 5.1951e-01, + 1.9558e-01, 3.7898e-01, 5.9402e-01, 4.2044e-01, + 8.7643e-01, 6.5956e-01, 7.4035e-01, 4.6048e-01, + 1.2749e-02, 9.2360e-01, 3.6981e-01, 9.4150e-01, + 9.7784e-01, 5.7582e-01, 3.1385e-01, 3.8101e-01, + 9.2912e-01, 4.3789e-01, 7.2061e-01, 5.2086e-01, + 7.3528e-01, 8.5192e-01, 3.5126e-01, 7.6776e-01, + 2.8613e-01, 6.3526e-02, 2.8596e-01, 6.3501e-01, + 4.9176e-02, 8.7348e-01, 5.2000e-01, 5.8496e-01, + 4.8855e-01, 7.2483e-01, 3.5412e-01, 6.6161e-01, + 5.5561e-01, 5.4683e-02, 9.0732e-01, 7.5460e-02, + 3.1367e-01, 7.3793e-01, 3.5729e-01, 2.8849e-01, + 8.9432e-01, 9.7713e-02, 1.1743e-01, 4.3089e-01, + 8.1021e-01, 8.8355e-01, 1.9843e-01, 6.1349e-02, + 8.3824e-01, 6.2155e-02, 2.0617e-01, 4.8812e-01, + 9.8256e-01, 8.8490e-02, 5.0411e-01, 9.5740e-01, + 9.7775e-02, 5.1726e-01, 7.1232e-01, 4.7675e-02, + 2.2480e-02, 3.7611e-01, 5.9660e-01, 6.2244e-01, + 4.4880e-01, 8.5892e-01, 1.9312e-01, 3.0721e-01, + 6.9925e-01, 6.7802e-01, 9.2157e-01, 8.5335e-01, + 4.1058e-01, 1.3666e-01, 3.9121e-01, 4.7478e-02, + 7.7467e-01, 5.8273e-03, 7.4476e-01, 1.9062e-01, + 7.5316e-02, 8.1639e-01, 5.2018e-01, 5.6018e-01, + 9.6970e-01, 3.0172e-02, 6.0521e-01, 4.0407e-02, + 5.7990e-01, 9.5453e-01, 8.8848e-01, 3.7532e-01, + 9.6133e-01, 7.7502e-01, 4.0087e-01, 6.8611e-01, + 3.2210e-02, 2.8094e-01, 7.3871e-01, 9.1462e-01, + 1.9356e-01, 2.6926e-01, 1.8582e-01, 5.4929e-02, + 9.1024e-01, 2.6956e-01, 5.5420e-01, 3.6135e-01, + 3.5875e-01, 6.0498e-01, 3.7275e-01, 9.8664e-01, + 8.8819e-01, 2.7405e-01, 1.1934e-01, 7.3250e-01, + 8.9712e-01, 8.8231e-01, 3.3160e-01, 9.5892e-01, + 8.4187e-01, 2.7689e-01, 1.6102e-02, 9.3174e-01, + 7.5817e-01, 9.0117e-01, 2.4771e-01, 2.6531e-01, + 5.1327e-01, 3.8035e-01, 4.0058e-01, 4.0599e-04, + 9.2180e-01, 9.3958e-01, 5.5013e-01, 6.4800e-01, + 1.1731e-01, 5.7797e-01, 2.5301e-01, 6.8211e-01, + 9.6611e-01, 2.1744e-01, 2.7821e-01, 3.8118e-01, + 6.2557e-01, 8.9564e-01, 9.8076e-01, 4.2087e-01, + 1.3319e-02, 1.8018e-01, 6.2599e-01, 1.4615e-01, + 7.4026e-01, 9.7573e-01, 1.0896e-01, 4.9573e-01, + 7.0308e-01, 6.3058e-01, 1.5012e-01, 5.9222e-01, + 6.5591e-01, 4.5450e-01, 1.3452e-01, 1.1006e-01, + 6.2072e-01, 1.7700e-01, 3.2033e-01, 7.1407e-01, + 9.3104e-01, 7.3480e-01, 7.5735e-01, 5.3846e-01, + 1.2357e-01, 6.0479e-01, 5.0554e-01, 7.2641e-01, + 7.1854e-01, 3.9348e-02, 4.3940e-02, 5.5093e-01, + 1.1556e-02, 4.5159e-01, 8.0712e-01, 7.2793e-01, + 9.4343e-01, 4.6801e-01, 6.8975e-01, 6.8837e-01, + 5.9669e-02, 7.6994e-01, 4.3286e-01, 7.1974e-01, + 4.6165e-01, 5.8672e-01, 5.0695e-01, 4.7555e-01, + 6.9675e-01, 3.7575e-01, 7.2625e-01, 6.6386e-01, + 3.0499e-01, 7.8592e-01, 5.7412e-01, 1.8771e-01, + 2.0747e-02, 9.7205e-01, 1.9993e-01, 4.5722e-01, + 9.2382e-02, 3.5018e-01, 5.2392e-01, 1.1923e-01, + 8.3795e-01, 3.4123e-02, 2.8436e-01, 1.9208e-01, + 6.4406e-02, 7.8742e-01, 2.0108e-01, 9.3581e-01, + 1.7748e-01, 8.9101e-01, 1.2175e-01, 8.8756e-01, + 7.8316e-01, 4.0800e-01, 5.6851e-01, 9.2774e-01, + 3.7604e-01, 5.3144e-01, 8.0134e-02, 6.2059e-02, + 8.7489e-03, 2.6598e-01, 6.7003e-01, 8.4602e-01, + 6.0322e-01, 6.8827e-01, 3.2298e-01, 7.1255e-01, + 9.5221e-01, 9.1295e-01, 2.3726e-01, 8.5325e-02, + 3.8353e-01, 7.7125e-01, 7.3426e-01, 5.3143e-01, + 8.0892e-01, 8.7041e-01, 5.8474e-01, 8.7437e-01, + 3.1761e-01, 8.3607e-01, 2.6952e-01, 6.1480e-01, + 7.6446e-01, 6.1953e-01, 4.9927e-01, 7.5728e-01, + 9.1551e-01, 2.2186e-01, 7.1374e-01, 2.5784e-01, + 8.5969e-01, 9.0881e-01, 1.8012e-01, 9.8941e-02, + 6.1818e-01, 2.8776e-01, 9.6763e-01, 2.0033e-01, + 8.2090e-01, 1.9388e-01, 
1.7567e-01, 4.4690e-01, + 2.0662e-01, 1.0745e-01, 9.3569e-01, 8.0811e-01, + 9.1073e-02, 3.4629e-01, 7.8523e-01, 1.1764e-01, + 2.8677e-01, 7.0343e-01, 8.7211e-01, 3.6672e-01, + 9.7703e-01, 5.2406e-01, 4.9980e-01, 5.4475e-01, + 8.9090e-01, 4.3209e-02, 4.4884e-01, 9.0686e-01, + 1.1154e-01, 1.0570e-01, 4.1471e-01, 9.7202e-01, + 3.8561e-01, 2.0579e-01, 4.4916e-01, 1.9721e-01, + 5.1959e-01, 7.6768e-01, 9.2048e-01, 2.2598e-01, + 9.6825e-01, 5.9095e-01, 2.7735e-02, 9.8780e-01, + 5.5600e-01, 6.4011e-01, 4.9115e-01, 1.5679e-01, + 9.0323e-01, 1.9041e-01, 8.5013e-01, 1.2025e-01, + 1.1804e-01, 6.8892e-01, 2.0461e-01, 1.9657e-01, + 7.0597e-01, 5.5451e-01, 1.8591e-01, 6.9899e-01, + 5.3076e-01, 7.3978e-01, 9.5676e-01, 4.0627e-01, + 8.3752e-01, 3.1046e-01, 5.6607e-01, 4.4132e-01, + 5.2029e-01, 8.5109e-01, 6.6508e-01, 2.6182e-01, + 6.7981e-01, 8.4438e-01, 4.1711e-01, 3.6277e-01, + 9.2177e-01, 3.6580e-02, 5.1418e-02, 4.5413e-01, + 5.0643e-01, 1.2991e-01, 7.2776e-01, 6.5957e-01, + 6.6907e-01, 4.4236e-01, 5.5565e-01, 1.1326e-01, + 6.2352e-01, 4.1680e-02, 8.5603e-01, 7.0539e-01, + 3.5385e-01, 5.1926e-01, 6.4794e-02, 6.4126e-01, + 3.0824e-01, 8.7537e-01, 3.8092e-03, 1.4017e-01, + 7.9757e-01, 2.7307e-01, 7.7809e-01, 5.3453e-01, + 3.5741e-01, 8.3152e-01, 9.6330e-01, 4.5118e-01, + 6.9011e-01, 8.9458e-01, 5.3158e-01, 8.2853e-01, + 1.8923e-01, 6.5167e-01, 3.8821e-01, 5.7283e-01, + 1.9550e-01, 4.8766e-01, 5.8973e-01, 6.6654e-01, + 3.4043e-01, 3.0734e-01, 7.2507e-01, 4.5141e-01, + 8.7825e-01, 1.8915e-01, 9.1650e-01, 2.5972e-01, + 1.2721e-02, 4.2352e-01, 6.5855e-01, 4.8197e-01, + 6.3384e-01, 1.1340e-01, 9.1519e-02, 7.2253e-01, + 7.7717e-01, 7.7128e-01, 7.9797e-01, 9.5449e-01, + 1.9479e-01, 9.4967e-01, 6.5866e-01, 2.6908e-01, + 6.6522e-01, 9.9513e-01, 6.4628e-01, 3.2376e-01, + 6.8241e-01, 2.0082e-01, 6.7192e-01, 6.3818e-01, + 8.3533e-01, 1.4580e-01, 6.7572e-01, 3.1304e-02, + 5.6257e-01, 6.4916e-01, 4.5939e-01, 1.6735e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.0651, 0.6329, 0.6141, ..., 0.3243, 0.1158, 0.5219]) +tensor([0.0747, 0.3561, 0.1255, ..., 0.0664, 0.4841, 0.3262]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1509,13 +919,389 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.808244943618774 seconds +Time: 10.197850227355957 seconds -[20.16, 20.24, 20.2, 20.4, 20.4, 20.4, 20.44, 20.32, 20.16, 20.08] -[20.0, 19.96, 20.64, 22.76, 24.36, 26.04, 26.44, 25.96, 25.96, 24.84, 23.4, 23.52, 23.44, 23.4] -14.250372886657715 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 141369, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.808244943618774, 'TIME_S_1KI': 0.0764541373541496, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.87872554779057, 'W': 22.096174468711904} -[20.16, 20.24, 20.2, 20.4, 20.4, 20.4, 20.44, 20.32, 20.16, 20.08, 20.64, 20.44, 20.4, 20.6, 20.64, 20.48, 20.36, 20.32, 20.32, 20.52] -366.82000000000005 -18.341 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 141369, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.808244943618774, 'TIME_S_1KI': 0.0764541373541496, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.87872554779057, 'W': 22.096174468711904, 'J_1KI': 2.2273534194044706, 'W_1KI': 0.15630141310125914, 'W_D': 3.7551744687119033, 'J_D': 
53.51263643360139, 'W_D_1KI': 0.02656292729461129, 'J_D_1KI': 0.00018789782268114857} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([7056, 919, 9795, 9151, 2044, 7967, 5705, 2623, 1627, + 6717, 8708, 775, 127, 1374, 5044, 4299, 6342, 2263, + 5929, 5936, 1548, 2847, 6130, 6554, 2239, 7163, 1692, + 5793, 7119, 1287, 7508, 865, 1459, 7418, 3194, 4266, + 5780, 5575, 180, 8863, 8594, 4896, 438, 2537, 9988, + 4607, 5188, 6211, 6192, 6056, 7097, 5429, 1839, 2821, + 5784, 5246, 8081, 948, 4779, 1850, 1043, 9101, 2658, + 6891, 8025, 4761, 559, 865, 7629, 6085, 5946, 6354, + 9409, 9347, 7997, 4210, 3579, 999, 6644, 8129, 3149, + 6858, 4041, 4647, 7223, 2236, 7192, 9546, 9793, 3327, + 3171, 2565, 5976, 7978, 3677, 2920, 144, 9344, 6975, + 2500, 5379, 6794, 7366, 5322, 1940, 4044, 8778, 8972, + 3256, 9932, 4555, 9183, 8216, 4060, 4031, 360, 1944, + 7355, 8202, 9688, 593, 6710, 7830, 6044, 3654, 5591, + 7924, 1626, 5386, 2008, 4406, 634, 5326, 8955, 8209, + 5178, 2353, 3613, 9298, 6923, 1330, 2295, 8418, 4016, + 6041, 3493, 2023, 3116, 9349, 4002, 5551, 6028, 3519, + 6478, 8114, 4566, 1480, 1179, 4402, 6390, 2641, 4788, + 5490, 7082, 2181, 7781, 5904, 491, 2224, 8974, 482, + 7270, 9130, 6779, 3453, 7281, 6000, 9094, 1080, 96, + 3235, 2362, 7656, 6780, 2699, 3796, 1691, 106, 5591, + 2247, 1360, 4408, 8461, 5554, 962, 5012, 1178, 665, + 8966, 149, 3510, 466, 7726, 3308, 1620, 3276, 3031, + 3091, 7257, 8304, 7811, 2312, 9044, 4159, 2417, 6077, + 5309, 1633, 575, 3648, 8105, 8542, 7301, 3395, 9379, + 2616, 5588, 3562, 5579, 6202, 1237, 1076, 4693, 5863, + 4521, 8863, 2402, 4796, 8453, 8138, 8938, 9681, 8822, + 3016, 4280, 6174, 1987, 2265, 9622, 8422, 8024, 2552, + 7142, 6196, 3924, 6374, 3611, 5004, 6476, 3085, 3871, + 7485, 3089, 5859, 3123, 8069, 3938, 8070, 3319, 4967, + 6032, 3132, 8882, 8168, 9948, 9153, 5960, 4617, 2269, + 9592, 3316, 8405, 7722, 5455, 3568, 9972, 3178, 7732, + 3073, 9461, 3455, 1563, 3313, 5518, 1888, 5013, 9499, + 7686, 8128, 3614, 58, 6450, 6598, 9327, 2675, 6405, + 1465, 5960, 4, 919, 8839, 74, 8303, 7367, 985, + 1714, 972, 3691, 196, 2221, 5916, 9621, 719, 7868, + 6821, 1399, 599, 7980, 2535, 2710, 8687, 3504, 500, + 8756, 2206, 3927, 9619, 5296, 3265, 3270, 8220, 9811, + 8515, 3994, 4320, 3608, 6591, 3810, 1151, 5003, 1681, + 9535, 5842, 20, 673, 3013, 3128, 9579, 416, 4669, + 1672, 5675, 7857, 9285, 3183, 9934, 3384, 3124, 6569, + 9838, 4601, 323, 9366, 3658, 3481, 8266, 106, 4724, + 9925, 449, 231, 1193, 8640, 6487, 4395, 7873, 2975, + 371, 4139, 2769, 51, 7434, 3992, 2930, 3651, 8635, + 2260, 5356, 167, 5798, 384, 6039, 7889, 8913, 9072, + 4191, 8932, 4779, 1901, 9554, 5285, 2097, 2772, 9175, + 6725, 2156, 148, 5, 476, 3156, 6607, 7786, 8502, + 1087, 2367, 867, 1415, 3356, 4022, 2905, 4640, 6433, + 1986, 3092, 5931, 6521, 8577, 8053, 2869, 6427, 3755, + 6368, 9012, 2562, 8601, 9397, 1396, 3615, 1144, 9404, + 4843, 3491, 1114, 7468, 9175, 5343, 2695, 6775, 3132, + 763, 2580, 5472, 6555, 5197, 7683, 7083, 9461, 6717, + 3416, 4535, 7651, 8103, 5254, 5119, 4843, 7481, 9740, + 875, 8435, 1895, 4149, 1288, 2687, 4563, 8844, 
5497, + 6857, 8265, 5805, 7507, 6505, 6546, 761, 9841, 4528, + 9297, 3090, 2401, 1436, 8521, 3201, 3280, 647, 6053, + 9778, 5421, 2338, 7895, 9657, 4768, 4164, 346, 6739, + 5802, 3214, 837, 1345, 161, 7417, 6331, 4380, 8050, + 5202, 8517, 4660, 8461, 3874, 4103, 9836, 4667, 1660, + 1906, 1376, 4340, 1934, 9527, 6941, 331, 4783, 1013, + 8971, 6772, 975, 1578, 3459, 408, 1053, 7816, 3059, + 6639, 3329, 9285, 4169, 206, 4382, 9903, 2838, 624, + 6682, 7989, 5248, 6652, 4925, 1208, 2515, 3523, 7868, + 5124, 932, 3036, 2610, 9397, 6019, 4365, 3159, 7967, + 6712, 4292, 9634, 2506, 2306, 6880, 1422, 5592, 3014, + 2127, 2108, 7692, 2917, 5954, 9346, 9190, 5918, 9056, + 1095, 6544, 2601, 8317, 1507, 8478, 427, 8983, 7403, + 1403, 9463, 9521, 1040, 2364, 4790, 3646, 1839, 5831, + 2407, 5569, 8636, 3161, 4775, 5244, 6918, 2550, 2648, + 9459, 505, 4866, 459, 3616, 1899, 4865, 6781, 645, + 8187, 4087, 8269, 527, 142, 3293, 2115, 9227, 4492, + 4310, 5673, 305, 9750, 3677, 3525, 7465, 7997, 3251, + 3987, 3609, 7768, 9505, 2837, 6841, 9179, 2234, 5163, + 4220, 1985, 3135, 228, 4791, 5638, 6344, 6055, 1786, + 6558, 448, 7423, 7966, 6313, 1661, 2565, 5057, 6400, + 9258, 4995, 104, 6131, 2357, 478, 9553, 4258, 9824, + 7655, 1162, 7233, 6219, 4197, 5671, 698, 3933, 5858, + 5825, 1485, 2940, 1221, 3277, 8498, 517, 4954, 4571, + 3551, 3811, 2162, 1085, 4990, 2770, 5124, 3331, 2642, + 4071, 9626, 4506, 3366, 3577, 4151, 3800, 2366, 4168, + 3825, 9342, 9534, 8100, 1320, 5092, 3157, 2094, 6070, + 8057, 5993, 2362, 411, 8733, 397, 7089, 7206, 2945, + 3064, 5117, 3443, 470, 608, 3384, 8567, 5110, 4565, + 6431, 5474, 8783, 7927, 5169, 4863, 7172, 8183, 1224, + 9394, 692, 7991, 536, 2135, 5294, 8037, 8526, 1190, + 63, 849, 9116, 6147, 7935, 7112, 6626, 226, 5579, + 9554, 4960, 2909, 3565, 5132, 4287, 1865, 5728, 7111, + 4986, 5695, 7077, 5616, 8771, 9145, 8441, 9676, 1058, + 6430, 6079, 4862, 6083, 1292, 6175, 658, 1311, 8075, + 3408, 4717, 4970, 3386, 6451, 4509, 1589, 2526, 3046, + 4015, 115, 606, 6816, 9023, 74, 3707, 8570, 3733, + 7439, 8010, 2330, 2707, 7997, 6578, 6530, 9528, 1545, + 1828, 6104, 2474, 5308, 5222, 6365, 8862, 4445, 3256, + 8596, 7733, 4546, 4298, 3962, 1735, 405, 9446, 2765, + 5223, 1245, 1928, 1613, 1076, 3386, 5499, 7117, 7236, + 1089, 5676, 7344, 5770, 9734, 6262, 6225, 7764, 2961, + 5070, 7253, 556, 1687, 3579, 9804, 7261, 2963, 8905, + 7732, 2949, 3331, 8106, 4114, 6793, 2309, 6606, 2474, + 1922, 3120, 8805, 7588, 3625, 3473, 6466, 7065, 4574, + 1985, 4989, 8004, 6734, 5981, 3660, 7778, 125, 7384, + 6141, 1158, 4521, 6919, 144, 914, 7797, 4111, 6086, + 6673, 8442, 7879, 7713, 9248, 4408, 1097, 7867, 1626, + 2747, 651, 2532, 5546, 1372, 589, 1653, 7492, 6806, + 1244, 9203, 2206, 5006, 203, 739, 5153, 6454, 4270, + 4396, 3640, 2150, 9255, 9834, 7708, 1267, 1636, 5030, + 795, 8742, 7543, 566, 6397, 3605, 8166, 8086, 4077, + 8364, 3818, 1834, 6488, 7143, 7438, 5866, 8540, 856, + 7785, 7368, 3461, 1705, 3376, 6688, 1751, 642, 9424, + 2608, 4435, 1836, 3463, 6828, 5152, 1957, 8354, 4656, + 6595, 4201, 9042, 3819, 3824, 5439, 7490, 1010, 4810, + 9972, 7566, 955, 751, 8629, 3365, 7594, 1053, 2299, + 7870]), + values=tensor([5.2060e-01, 5.8752e-01, 7.2713e-01, 1.0305e-01, + 9.3686e-01, 4.5302e-01, 7.9066e-01, 3.3889e-01, + 9.1304e-01, 7.1777e-01, 8.2105e-01, 1.2503e-01, + 3.7067e-02, 6.7818e-01, 7.9631e-01, 1.5332e-01, + 4.2909e-02, 3.6354e-01, 7.8473e-02, 3.9129e-01, + 5.0385e-01, 2.7311e-01, 2.8426e-01, 9.8490e-01, + 4.6471e-01, 1.8436e-01, 2.9210e-01, 7.6968e-01, + 9.8680e-01, 6.5393e-01, 
6.5447e-01, 7.6587e-01, + 5.1696e-01, 3.0301e-01, 1.2974e-01, 4.9850e-01, + 9.5414e-01, 3.3815e-01, 1.2515e-01, 6.9570e-01, + 2.4680e-01, 5.9705e-01, 6.2698e-01, 7.7985e-01, + 6.0484e-01, 4.0524e-01, 6.3660e-01, 3.4506e-01, + 8.0482e-01, 7.8297e-02, 6.1842e-01, 5.7868e-01, + 6.3483e-01, 6.6108e-01, 9.3584e-04, 7.4347e-01, + 6.9134e-01, 8.5039e-01, 7.7178e-01, 3.8597e-01, + 1.9561e-01, 2.9964e-01, 5.3888e-01, 7.9254e-01, + 8.6409e-01, 7.5097e-01, 3.3689e-01, 7.8610e-02, + 4.5323e-01, 8.5126e-01, 6.9200e-01, 1.9498e-02, + 8.3003e-01, 8.6298e-01, 4.3806e-01, 7.1679e-01, + 8.1194e-01, 9.9720e-01, 8.2151e-01, 9.6087e-01, + 6.3853e-01, 3.4560e-01, 5.5284e-01, 4.4640e-01, + 3.7580e-01, 3.2438e-01, 8.9706e-01, 2.8604e-01, + 2.9762e-01, 2.8814e-01, 6.0684e-01, 4.1394e-01, + 6.2215e-01, 9.4551e-01, 4.0044e-01, 8.6963e-01, + 5.5266e-01, 5.9098e-01, 5.9783e-01, 1.7437e-01, + 3.4636e-01, 2.4680e-01, 2.3300e-01, 1.5789e-01, + 7.2572e-01, 4.7658e-01, 7.9251e-01, 5.5119e-01, + 3.4901e-01, 1.8926e-01, 7.2681e-02, 4.1166e-01, + 2.3574e-01, 9.2902e-01, 4.2647e-01, 4.2048e-01, + 7.6334e-01, 2.7562e-01, 1.2457e-01, 8.0447e-01, + 2.7298e-01, 2.6366e-01, 9.0147e-01, 9.8730e-01, + 4.8371e-01, 4.3424e-01, 8.9082e-01, 9.3669e-02, + 2.4573e-01, 1.7890e-01, 7.0955e-01, 7.5968e-01, + 4.4304e-01, 7.1779e-01, 7.2909e-01, 7.9905e-01, + 2.0894e-01, 1.6586e-01, 7.5174e-02, 4.0334e-01, + 1.0341e-01, 7.8773e-01, 8.5689e-01, 8.9735e-01, + 1.9547e-01, 8.9818e-01, 8.1080e-01, 5.8252e-01, + 6.2795e-01, 2.4733e-02, 3.7285e-01, 7.4750e-01, + 4.5329e-01, 5.4283e-01, 1.3581e-01, 8.2036e-01, + 3.3337e-02, 6.6711e-01, 2.7268e-01, 5.5912e-01, + 5.5669e-01, 3.5501e-01, 6.2792e-01, 9.7736e-01, + 3.5615e-01, 8.9429e-03, 5.2671e-01, 1.1444e-01, + 7.5357e-02, 8.8607e-01, 8.5215e-01, 8.6159e-01, + 3.1847e-01, 1.4552e-01, 6.4152e-01, 7.1863e-01, + 3.3005e-01, 3.2445e-02, 1.0168e-01, 8.4233e-01, + 8.8000e-01, 2.7047e-01, 7.1423e-01, 7.8187e-01, + 8.9146e-02, 4.9672e-01, 5.9046e-01, 1.9515e-01, + 9.8528e-01, 5.0321e-01, 5.8000e-01, 7.4168e-01, + 3.5260e-02, 1.2683e-01, 4.5738e-01, 6.8617e-01, + 4.4433e-01, 4.8514e-01, 5.5207e-01, 3.5556e-01, + 8.9692e-01, 3.6084e-01, 3.8720e-01, 5.8634e-01, + 7.0358e-01, 3.0095e-01, 1.3903e-01, 1.1694e-01, + 9.7711e-01, 3.7841e-01, 2.3352e-01, 4.5329e-01, + 8.2085e-01, 3.0580e-01, 4.8927e-01, 3.0347e-01, + 5.1485e-02, 5.2444e-02, 3.0794e-01, 5.9206e-01, + 8.3589e-01, 1.7323e-01, 3.1820e-01, 7.0798e-01, + 1.5440e-01, 1.9361e-01, 1.0835e-01, 8.1042e-01, + 7.1437e-02, 4.4926e-01, 8.1084e-01, 6.5975e-01, + 2.3167e-01, 9.5539e-01, 9.0418e-01, 1.8474e-01, + 8.4342e-01, 2.5289e-01, 6.3702e-01, 3.5397e-01, + 1.1201e-01, 4.5470e-01, 1.3388e-01, 2.6199e-01, + 8.1948e-01, 6.9816e-01, 9.3318e-01, 6.6708e-02, + 2.6242e-01, 4.7858e-02, 6.7194e-01, 4.8540e-01, + 5.6054e-01, 6.7286e-01, 8.0291e-01, 7.4681e-02, + 5.2165e-01, 3.6455e-01, 2.6177e-01, 2.8656e-01, + 4.8881e-01, 1.5495e-01, 8.1151e-01, 6.4157e-01, + 4.6118e-01, 9.8777e-02, 3.7165e-01, 7.8764e-01, + 6.8295e-01, 2.4347e-01, 1.6075e-01, 8.7591e-01, + 3.4120e-01, 5.4312e-01, 2.2322e-01, 7.3732e-01, + 9.2524e-02, 8.0737e-01, 4.2221e-01, 6.0497e-02, + 2.2442e-01, 1.6638e-01, 9.3025e-01, 3.6644e-01, + 9.7051e-01, 2.6113e-01, 3.3298e-01, 5.5566e-01, + 8.4633e-01, 2.1670e-02, 3.4866e-01, 4.1243e-01, + 4.3974e-02, 5.1011e-02, 9.7001e-01, 9.2475e-01, + 6.8789e-01, 4.8683e-01, 8.8514e-01, 2.8747e-01, + 8.3000e-01, 5.9514e-01, 1.9359e-01, 9.9127e-01, + 4.6767e-01, 9.0117e-02, 3.1439e-01, 7.7717e-01, + 3.2578e-01, 7.6461e-01, 4.9560e-01, 7.0813e-01, + 8.9512e-01, 3.4294e-01, 
7.9012e-01, 2.1834e-01, + 9.9954e-01, 8.4021e-01, 5.2415e-01, 5.5913e-01, + 6.0001e-01, 4.0800e-01, 9.2418e-01, 9.8484e-02, + 1.2325e-01, 9.2593e-01, 8.8126e-01, 2.1903e-01, + 8.6474e-02, 5.6159e-01, 1.6813e-01, 3.7588e-01, + 1.1257e-01, 7.7701e-01, 6.1191e-01, 9.3067e-01, + 1.7717e-01, 6.9689e-01, 4.1894e-01, 3.1744e-01, + 6.8942e-01, 5.4469e-01, 6.9142e-01, 5.9683e-01, + 4.4063e-01, 7.9936e-01, 7.7242e-01, 2.8988e-01, + 9.4011e-01, 6.2090e-01, 4.0644e-01, 4.6521e-01, + 6.9337e-01, 9.9796e-02, 8.2484e-01, 6.9345e-01, + 6.1383e-01, 7.4834e-01, 4.0722e-01, 9.9024e-01, + 8.5334e-01, 8.8771e-01, 5.0253e-01, 7.2537e-01, + 6.4116e-01, 4.1841e-01, 9.1987e-01, 6.3601e-02, + 6.2845e-01, 9.8979e-01, 3.2048e-02, 4.9481e-01, + 7.6203e-01, 8.4482e-01, 1.8315e-01, 9.7150e-01, + 6.4234e-01, 3.9806e-01, 3.1906e-01, 8.5592e-01, + 2.7348e-01, 1.0324e-01, 3.3822e-01, 1.0261e-01, + 7.8657e-01, 4.1245e-02, 6.2280e-01, 1.5594e-01, + 1.4057e-01, 9.3217e-01, 5.3651e-01, 3.1414e-01, + 7.4943e-01, 1.8062e-01, 6.8355e-01, 1.7011e-01, + 7.9311e-01, 5.6505e-01, 1.7516e-01, 3.2628e-01, + 7.4869e-01, 3.6292e-02, 7.7730e-01, 5.6876e-01, + 7.4365e-01, 3.1819e-01, 2.9167e-01, 8.0054e-01, + 5.2095e-02, 8.4271e-01, 9.4201e-01, 2.6332e-01, + 4.1440e-01, 2.8004e-01, 6.3749e-01, 8.2784e-01, + 3.8989e-01, 4.8344e-01, 6.1792e-01, 7.7696e-01, + 2.9025e-01, 3.0195e-01, 1.6203e-02, 4.9882e-02, + 9.5260e-01, 3.8574e-02, 8.6137e-01, 1.0206e-01, + 1.8185e-01, 3.7184e-01, 1.8011e-01, 5.8471e-01, + 9.0896e-01, 7.8819e-01, 1.2554e-01, 9.1937e-01, + 4.7326e-01, 8.1565e-01, 4.0860e-01, 9.5624e-01, + 6.4546e-01, 2.1755e-01, 1.8583e-01, 7.2313e-01, + 5.0034e-01, 9.6810e-01, 1.1208e-01, 3.3256e-01, + 1.0626e-01, 8.2704e-01, 9.0571e-01, 9.1930e-01, + 5.0810e-01, 2.9139e-02, 4.8153e-01, 8.3425e-01, + 7.7208e-01, 2.0930e-01, 7.7873e-01, 1.9398e-01, + 4.6583e-01, 7.2143e-01, 7.7072e-01, 8.8225e-01, + 5.6393e-01, 4.5270e-01, 4.5709e-01, 3.4438e-02, + 8.3723e-01, 8.7398e-01, 7.7789e-01, 3.1834e-01, + 6.7630e-01, 7.2696e-01, 6.2046e-01, 8.7032e-01, + 7.5275e-01, 1.2652e-01, 1.6913e-01, 8.9638e-01, + 8.4284e-01, 6.7028e-01, 1.6150e-01, 4.8858e-01, + 5.2657e-01, 2.6709e-01, 5.3737e-01, 7.9635e-02, + 4.6502e-01, 6.6489e-01, 3.3302e-02, 6.7807e-03, + 3.2601e-01, 6.2838e-01, 8.8805e-01, 6.4081e-01, + 8.3664e-01, 2.1253e-01, 9.9560e-01, 1.0008e-01, + 8.1128e-01, 7.1759e-02, 1.9140e-01, 3.9544e-01, + 2.0466e-01, 1.0560e-01, 5.0978e-01, 4.3684e-01, + 1.8097e-02, 5.2480e-01, 1.3644e-01, 6.5843e-01, + 4.7603e-01, 3.9206e-01, 2.5961e-01, 4.4991e-01, + 2.7612e-01, 5.3666e-01, 1.0518e-02, 8.5692e-01, + 2.4882e-01, 9.3487e-01, 5.1282e-01, 9.4781e-01, + 6.6273e-01, 7.0853e-01, 2.2742e-01, 9.1791e-01, + 6.9906e-01, 3.2979e-01, 5.6041e-01, 8.6281e-01, + 9.5303e-01, 7.5565e-01, 4.3419e-01, 8.3829e-01, + 2.4070e-01, 6.8860e-01, 5.5668e-01, 1.8449e-01, + 9.9667e-01, 7.7594e-01, 5.3148e-01, 4.1552e-01, + 5.1947e-01, 1.3992e-01, 4.4856e-01, 7.4868e-01, + 6.3705e-01, 3.5783e-01, 1.2134e-01, 6.9568e-01, + 1.2509e-01, 1.7793e-01, 3.2242e-02, 4.1301e-01, + 5.9646e-01, 1.6615e-01, 3.1664e-01, 8.1783e-02, + 2.9824e-02, 2.1237e-01, 5.2911e-01, 5.1732e-01, + 1.9451e-01, 3.5512e-01, 2.9795e-01, 5.1951e-01, + 1.9558e-01, 3.7898e-01, 5.9402e-01, 4.2044e-01, + 8.7643e-01, 6.5956e-01, 7.4035e-01, 4.6048e-01, + 1.2749e-02, 9.2360e-01, 3.6981e-01, 9.4150e-01, + 9.7784e-01, 5.7582e-01, 3.1385e-01, 3.8101e-01, + 9.2912e-01, 4.3789e-01, 7.2061e-01, 5.2086e-01, + 7.3528e-01, 8.5192e-01, 3.5126e-01, 7.6776e-01, + 2.8613e-01, 6.3526e-02, 2.8596e-01, 6.3501e-01, + 4.9176e-02, 8.7348e-01, 
5.2000e-01, 5.8496e-01, + 4.8855e-01, 7.2483e-01, 3.5412e-01, 6.6161e-01, + 5.5561e-01, 5.4683e-02, 9.0732e-01, 7.5460e-02, + 3.1367e-01, 7.3793e-01, 3.5729e-01, 2.8849e-01, + 8.9432e-01, 9.7713e-02, 1.1743e-01, 4.3089e-01, + 8.1021e-01, 8.8355e-01, 1.9843e-01, 6.1349e-02, + 8.3824e-01, 6.2155e-02, 2.0617e-01, 4.8812e-01, + 9.8256e-01, 8.8490e-02, 5.0411e-01, 9.5740e-01, + 9.7775e-02, 5.1726e-01, 7.1232e-01, 4.7675e-02, + 2.2480e-02, 3.7611e-01, 5.9660e-01, 6.2244e-01, + 4.4880e-01, 8.5892e-01, 1.9312e-01, 3.0721e-01, + 6.9925e-01, 6.7802e-01, 9.2157e-01, 8.5335e-01, + 4.1058e-01, 1.3666e-01, 3.9121e-01, 4.7478e-02, + 7.7467e-01, 5.8273e-03, 7.4476e-01, 1.9062e-01, + 7.5316e-02, 8.1639e-01, 5.2018e-01, 5.6018e-01, + 9.6970e-01, 3.0172e-02, 6.0521e-01, 4.0407e-02, + 5.7990e-01, 9.5453e-01, 8.8848e-01, 3.7532e-01, + 9.6133e-01, 7.7502e-01, 4.0087e-01, 6.8611e-01, + 3.2210e-02, 2.8094e-01, 7.3871e-01, 9.1462e-01, + 1.9356e-01, 2.6926e-01, 1.8582e-01, 5.4929e-02, + 9.1024e-01, 2.6956e-01, 5.5420e-01, 3.6135e-01, + 3.5875e-01, 6.0498e-01, 3.7275e-01, 9.8664e-01, + 8.8819e-01, 2.7405e-01, 1.1934e-01, 7.3250e-01, + 8.9712e-01, 8.8231e-01, 3.3160e-01, 9.5892e-01, + 8.4187e-01, 2.7689e-01, 1.6102e-02, 9.3174e-01, + 7.5817e-01, 9.0117e-01, 2.4771e-01, 2.6531e-01, + 5.1327e-01, 3.8035e-01, 4.0058e-01, 4.0599e-04, + 9.2180e-01, 9.3958e-01, 5.5013e-01, 6.4800e-01, + 1.1731e-01, 5.7797e-01, 2.5301e-01, 6.8211e-01, + 9.6611e-01, 2.1744e-01, 2.7821e-01, 3.8118e-01, + 6.2557e-01, 8.9564e-01, 9.8076e-01, 4.2087e-01, + 1.3319e-02, 1.8018e-01, 6.2599e-01, 1.4615e-01, + 7.4026e-01, 9.7573e-01, 1.0896e-01, 4.9573e-01, + 7.0308e-01, 6.3058e-01, 1.5012e-01, 5.9222e-01, + 6.5591e-01, 4.5450e-01, 1.3452e-01, 1.1006e-01, + 6.2072e-01, 1.7700e-01, 3.2033e-01, 7.1407e-01, + 9.3104e-01, 7.3480e-01, 7.5735e-01, 5.3846e-01, + 1.2357e-01, 6.0479e-01, 5.0554e-01, 7.2641e-01, + 7.1854e-01, 3.9348e-02, 4.3940e-02, 5.5093e-01, + 1.1556e-02, 4.5159e-01, 8.0712e-01, 7.2793e-01, + 9.4343e-01, 4.6801e-01, 6.8975e-01, 6.8837e-01, + 5.9669e-02, 7.6994e-01, 4.3286e-01, 7.1974e-01, + 4.6165e-01, 5.8672e-01, 5.0695e-01, 4.7555e-01, + 6.9675e-01, 3.7575e-01, 7.2625e-01, 6.6386e-01, + 3.0499e-01, 7.8592e-01, 5.7412e-01, 1.8771e-01, + 2.0747e-02, 9.7205e-01, 1.9993e-01, 4.5722e-01, + 9.2382e-02, 3.5018e-01, 5.2392e-01, 1.1923e-01, + 8.3795e-01, 3.4123e-02, 2.8436e-01, 1.9208e-01, + 6.4406e-02, 7.8742e-01, 2.0108e-01, 9.3581e-01, + 1.7748e-01, 8.9101e-01, 1.2175e-01, 8.8756e-01, + 7.8316e-01, 4.0800e-01, 5.6851e-01, 9.2774e-01, + 3.7604e-01, 5.3144e-01, 8.0134e-02, 6.2059e-02, + 8.7489e-03, 2.6598e-01, 6.7003e-01, 8.4602e-01, + 6.0322e-01, 6.8827e-01, 3.2298e-01, 7.1255e-01, + 9.5221e-01, 9.1295e-01, 2.3726e-01, 8.5325e-02, + 3.8353e-01, 7.7125e-01, 7.3426e-01, 5.3143e-01, + 8.0892e-01, 8.7041e-01, 5.8474e-01, 8.7437e-01, + 3.1761e-01, 8.3607e-01, 2.6952e-01, 6.1480e-01, + 7.6446e-01, 6.1953e-01, 4.9927e-01, 7.5728e-01, + 9.1551e-01, 2.2186e-01, 7.1374e-01, 2.5784e-01, + 8.5969e-01, 9.0881e-01, 1.8012e-01, 9.8941e-02, + 6.1818e-01, 2.8776e-01, 9.6763e-01, 2.0033e-01, + 8.2090e-01, 1.9388e-01, 1.7567e-01, 4.4690e-01, + 2.0662e-01, 1.0745e-01, 9.3569e-01, 8.0811e-01, + 9.1073e-02, 3.4629e-01, 7.8523e-01, 1.1764e-01, + 2.8677e-01, 7.0343e-01, 8.7211e-01, 3.6672e-01, + 9.7703e-01, 5.2406e-01, 4.9980e-01, 5.4475e-01, + 8.9090e-01, 4.3209e-02, 4.4884e-01, 9.0686e-01, + 1.1154e-01, 1.0570e-01, 4.1471e-01, 9.7202e-01, + 3.8561e-01, 2.0579e-01, 4.4916e-01, 1.9721e-01, + 5.1959e-01, 7.6768e-01, 9.2048e-01, 2.2598e-01, + 9.6825e-01, 5.9095e-01, 
2.7735e-02, 9.8780e-01, + 5.5600e-01, 6.4011e-01, 4.9115e-01, 1.5679e-01, + 9.0323e-01, 1.9041e-01, 8.5013e-01, 1.2025e-01, + 1.1804e-01, 6.8892e-01, 2.0461e-01, 1.9657e-01, + 7.0597e-01, 5.5451e-01, 1.8591e-01, 6.9899e-01, + 5.3076e-01, 7.3978e-01, 9.5676e-01, 4.0627e-01, + 8.3752e-01, 3.1046e-01, 5.6607e-01, 4.4132e-01, + 5.2029e-01, 8.5109e-01, 6.6508e-01, 2.6182e-01, + 6.7981e-01, 8.4438e-01, 4.1711e-01, 3.6277e-01, + 9.2177e-01, 3.6580e-02, 5.1418e-02, 4.5413e-01, + 5.0643e-01, 1.2991e-01, 7.2776e-01, 6.5957e-01, + 6.6907e-01, 4.4236e-01, 5.5565e-01, 1.1326e-01, + 6.2352e-01, 4.1680e-02, 8.5603e-01, 7.0539e-01, + 3.5385e-01, 5.1926e-01, 6.4794e-02, 6.4126e-01, + 3.0824e-01, 8.7537e-01, 3.8092e-03, 1.4017e-01, + 7.9757e-01, 2.7307e-01, 7.7809e-01, 5.3453e-01, + 3.5741e-01, 8.3152e-01, 9.6330e-01, 4.5118e-01, + 6.9011e-01, 8.9458e-01, 5.3158e-01, 8.2853e-01, + 1.8923e-01, 6.5167e-01, 3.8821e-01, 5.7283e-01, + 1.9550e-01, 4.8766e-01, 5.8973e-01, 6.6654e-01, + 3.4043e-01, 3.0734e-01, 7.2507e-01, 4.5141e-01, + 8.7825e-01, 1.8915e-01, 9.1650e-01, 2.5972e-01, + 1.2721e-02, 4.2352e-01, 6.5855e-01, 4.8197e-01, + 6.3384e-01, 1.1340e-01, 9.1519e-02, 7.2253e-01, + 7.7717e-01, 7.7128e-01, 7.9797e-01, 9.5449e-01, + 1.9479e-01, 9.4967e-01, 6.5866e-01, 2.6908e-01, + 6.6522e-01, 9.9513e-01, 6.4628e-01, 3.2376e-01, + 6.8241e-01, 2.0082e-01, 6.7192e-01, 6.3818e-01, + 8.3533e-01, 1.4580e-01, 6.7572e-01, 3.1304e-02, + 5.6257e-01, 6.4916e-01, 4.5939e-01, 1.6735e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.0747, 0.3561, 0.1255, ..., 0.0664, 0.4841, 0.3262]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.197850227355957 seconds + +[16.64, 16.64, 16.92, 16.76, 17.12, 17.2, 17.04, 17.0, 16.76, 16.88] +[16.68, 16.88, 19.88, 22.24, 22.24, 24.24, 24.56, 25.08, 21.56, 20.04, 19.4, 19.68, 19.88, 19.92] +14.228495121002197 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 141920, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.197850227355957, 'TIME_S_1KI': 0.0718563291104563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 278.53162281036373, 'W': 19.575620643059622} +[16.64, 16.64, 16.92, 16.76, 17.12, 17.2, 17.04, 17.0, 16.76, 16.88, 16.16, 16.08, 16.16, 16.12, 16.12, 16.24, 16.56, 16.84, 16.88, 17.08] +299.82 +14.991 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 141920, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.197850227355957, 'TIME_S_1KI': 0.0718563291104563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 278.53162281036373, 'W': 19.575620643059622, 'J_1KI': 1.9625959893627658, 'W_1KI': 0.1379341928062262, 'W_D': 4.584620643059623, 'J_D': 65.2322524514198, 'W_D_1KI': 0.032304260449969154, 'J_D_1KI': 0.00022762303022808028} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json index 3cf2612..35c105f 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1458, "MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.73922610282898, "TIME_S_1KI": 7.365724350362812, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 524.544666223526, "W": 35.90986855086994, "J_1KI": 359.77000426853635, "W_1KI": 24.629539472475955, "W_D": 17.579868550869936, "J_D": 256.7936518120765, "W_D_1KI": 12.0575230115706, "J_D_1KI": 8.269906043601233} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1484, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.88543152809143, "TIME_S_1KI": 7.335196447500964, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 480.2575412559509, "W": 32.95171788766838, "J_1KI": 323.6236800916111, "W_1KI": 22.204661649372223, "W_D": 16.90171788766838, "J_D": 246.33548707246783, "W_D_1KI": 11.389297767970607, "J_D_1KI": 7.67472895415809} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output index ac4d6c8..2875e7e 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 500000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.201478004455566} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.073613166809082} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 2499994, - 2499998, 2500000]), - col_indices=tensor([111852, 327751, 365150, ..., 493517, 11445, - 207886]), - values=tensor([0.9407, 0.2669, 0.8671, ..., 0.7942, 0.4760, 0.2816]), +tensor(crow_indices=tensor([ 0, 6, 13, ..., 2499990, + 2499995, 2500000]), + col_indices=tensor([ 8141, 69274, 149925, ..., 390687, 407872, + 439375]), + values=tensor([0.4271, 0.3560, 0.2859, ..., 0.3294, 0.0849, 0.5690]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4423, 0.4635, 0.1741, ..., 0.0346, 0.7600, 0.4318]) +tensor([0.1896, 0.3447, 0.8973, ..., 0.8957, 0.5716, 0.6993]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 7.201478004455566 seconds +Time: 7.073613166809082 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1458 -ss 500000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.73922610282898} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1484 -ss 500000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.88543152809143} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 2499994, +tensor(crow_indices=tensor([ 0, 10, 15, ..., 2499994, 2500000, 2500000]), - col_indices=tensor([198857, 399888, 193187, ..., 179513, 216653, - 450880]), - values=tensor([0.4554, 0.7901, 0.7135, ..., 0.0158, 0.9399, 0.2709]), + col_indices=tensor([ 19808, 30523, 42041, ..., 253465, 473291, + 475423]), + values=tensor([0.2655, 0.1335, 0.5252, ..., 0.0072, 0.8874, 0.1974]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8645, 0.3649, 0.9819, ..., 0.4118, 0.2155, 0.1417]) +tensor([0.7673, 0.2797, 0.0430, ..., 0.8352, 0.7956, 0.1250]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.73922610282898 seconds +Time: 10.88543152809143 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 2499994, +tensor(crow_indices=tensor([ 0, 10, 15, ..., 2499994, 2500000, 2500000]), - col_indices=tensor([198857, 399888, 193187, ..., 179513, 216653, - 450880]), - values=tensor([0.4554, 0.7901, 0.7135, ..., 0.0158, 0.9399, 0.2709]), + col_indices=tensor([ 19808, 30523, 42041, ..., 253465, 473291, + 475423]), + values=tensor([0.2655, 0.1335, 0.5252, ..., 0.0072, 0.8874, 0.1974]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8645, 0.3649, 0.9819, ..., 0.4118, 0.2155, 0.1417]) +tensor([0.7673, 0.2797, 0.0430, ..., 0.8352, 0.7956, 0.1250]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.73922610282898 seconds +Time: 10.88543152809143 seconds -[20.76, 20.72, 20.4, 20.4, 20.36, 20.36, 20.12, 20.36, 20.28, 20.32] -[20.56, 20.48, 21.52, 22.84, 24.72, 30.76, 37.24, 43.6, 43.6, 49.32, 53.6, 53.68, 53.6, 53.56] -14.607256650924683 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1458, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.73922610282898, 'TIME_S_1KI': 7.365724350362812, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 524.544666223526, 'W': 35.90986855086994} -[20.76, 20.72, 20.4, 20.4, 20.36, 20.36, 20.12, 20.36, 20.28, 20.32, 20.48, 20.4, 20.32, 20.04, 20.2, 20.4, 20.36, 20.36, 20.48, 20.52] -366.6 -18.330000000000002 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1458, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.73922610282898, 'TIME_S_1KI': 7.365724350362812, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 524.544666223526, 'W': 35.90986855086994, 'J_1KI': 359.77000426853635, 'W_1KI': 24.629539472475955, 'W_D': 17.579868550869936, 'J_D': 256.7936518120765, 'W_D_1KI': 12.0575230115706, 'J_D_1KI': 8.269906043601233} +[20.16, 19.76, 19.4, 19.48, 19.12, 18.68, 18.6, 18.12, 18.12, 17.68] +[17.44, 17.16, 17.36, 21.36, 23.28, 27.36, 35.28, 38.6, 44.16, 47.92, 48.84, 48.56, 48.96, 49.04] +14.574582815170288 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1484, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.88543152809143, 'TIME_S_1KI': 7.335196447500964, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 480.2575412559509, 'W': 32.95171788766838} +[20.16, 19.76, 19.4, 19.48, 19.12, 18.68, 18.6, 18.12, 18.12, 17.68, 16.68, 16.56, 16.56, 16.56, 16.72, 16.64, 16.76, 17.08, 17.04, 17.08] +321.0 +16.05 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1484, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.88543152809143, 'TIME_S_1KI': 7.335196447500964, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 480.2575412559509, 'W': 32.95171788766838, 'J_1KI': 323.6236800916111, 'W_1KI': 22.204661649372223, 'W_D': 16.90171788766838, 'J_D': 246.33548707246783, 'W_D_1KI': 11.389297767970607, 'J_D_1KI': 7.67472895415809} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json index 36f1dbe..0acd95a 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3515, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.52223539352417, "TIME_S_1KI": 2.9935235827949276, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 470.9832169723511, "W": 32.170385708153674, "J_1KI": 133.99238036197755, "W_1KI": 9.15231456846477, "W_D": 13.629385708153674, "J_D": 199.53792237424858, "W_D_1KI": 3.8774923778531076, "J_D_1KI": 1.1031272767718656} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3392, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.727018594741821, "TIME_S_1KI": 3.1624465196762443, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 423.8805270671845, "W": 29.00744018011372, "J_1KI": 124.96477802688223, "W_1KI": 8.55172175121277, "W_D": 13.914440180113719, "J_D": 203.32922177100187, "W_D_1KI": 4.102134487061827, "J_D_1KI": 1.2093556860441705} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output index 60bafaf..f123524 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.9865975379943848} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 3.0953831672668457} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 13, ..., 249989, 249995, +tensor(crow_indices=tensor([ 0, 6, 13, ..., 249985, 249991, 250000]), - col_indices=tensor([12071, 16957, 24871, ..., 32088, 41674, 47752]), - values=tensor([0.0278, 0.4403, 0.7542, ..., 0.8727, 0.3256, 0.0294]), + col_indices=tensor([ 782, 10679, 21591, ..., 21721, 25862, 26402]), + values=tensor([0.1080, 0.2599, 0.9753, ..., 0.8598, 0.0309, 0.7621]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.9906, 0.0790, 0.7013, ..., 0.2118, 0.2385, 0.3873]) +tensor([0.0624, 0.3415, 0.4601, ..., 0.0482, 0.7737, 0.1465]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 2.9865975379943848 seconds +Time: 3.0953831672668457 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3515 -ss 50000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.52223539352417} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3392 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.727018594741821} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 249992, 249995, +tensor(crow_indices=tensor([ 0, 3, 6, ..., 249992, 249997, 250000]), - col_indices=tensor([ 9701, 11138, 26862, ..., 20273, 37187, 48197]), - values=tensor([0.8537, 0.5403, 0.1220, ..., 0.0155, 0.7712, 0.8752]), + col_indices=tensor([29888, 37512, 45145, ..., 10362, 27481, 28096]), + values=tensor([0.5987, 0.4413, 0.1210, ..., 0.9023, 0.1888, 0.1246]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5658, 0.7328, 0.9479, ..., 0.1014, 0.1582, 0.5663]) +tensor([0.0260, 0.0462, 0.3716, ..., 0.4992, 0.3586, 0.2225]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.52223539352417 seconds +Time: 10.727018594741821 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 249992, 249995, +tensor(crow_indices=tensor([ 0, 3, 6, ..., 249992, 249997, 250000]), - col_indices=tensor([ 9701, 11138, 26862, ..., 20273, 37187, 48197]), - values=tensor([0.8537, 0.5403, 0.1220, ..., 0.0155, 0.7712, 0.8752]), + col_indices=tensor([29888, 37512, 45145, ..., 10362, 27481, 28096]), + values=tensor([0.5987, 0.4413, 0.1210, ..., 0.9023, 0.1888, 0.1246]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5658, 0.7328, 0.9479, ..., 0.1014, 0.1582, 0.5663]) +tensor([0.0260, 0.0462, 0.3716, ..., 0.4992, 0.3586, 0.2225]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.52223539352417 seconds +Time: 10.727018594741821 seconds -[20.72, 20.68, 20.52, 20.64, 20.64, 20.68, 20.68, 20.72, 20.72, 20.72] -[20.64, 20.72, 21.28, 22.68, 24.8, 29.24, 34.6, 38.2, 42.72, 43.84, 43.84, 44.32, 44.16, 44.08] -14.640272617340088 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3515, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.52223539352417, 'TIME_S_1KI': 2.9935235827949276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 470.9832169723511, 'W': 32.170385708153674} -[20.72, 20.68, 20.52, 20.64, 20.64, 20.68, 20.68, 20.72, 20.72, 20.72, 20.32, 20.24, 20.44, 20.32, 20.48, 20.64, 20.52, 20.64, 20.96, 20.84] -370.82 -18.541 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3515, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.52223539352417, 'TIME_S_1KI': 2.9935235827949276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 470.9832169723511, 'W': 32.170385708153674, 'J_1KI': 133.99238036197755, 'W_1KI': 9.15231456846477, 'W_D': 13.629385708153674, 'J_D': 199.53792237424858, 'W_D_1KI': 3.8774923778531076, 'J_D_1KI': 1.1031272767718656} +[16.52, 16.28, 16.24, 16.16, 16.44, 16.44, 16.6, 16.52, 16.56, 16.68] +[16.44, 16.64, 17.44, 19.52, 22.44, 27.08, 32.32, 35.8, 38.92, 39.84, 40.12, 40.12, 40.24, 40.6] +14.612820863723755 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3392, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.727018594741821, 'TIME_S_1KI': 3.1624465196762443, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 423.8805270671845, 'W': 29.00744018011372} +[16.52, 16.28, 16.24, 16.16, 16.44, 16.44, 16.6, 16.52, 16.56, 16.68, 16.84, 16.68, 16.84, 17.24, 17.32, 17.48, 17.4, 17.24, 16.96, 16.88] +301.86 +15.093 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3392, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.727018594741821, 'TIME_S_1KI': 3.1624465196762443, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 423.8805270671845, 'W': 29.00744018011372, 'J_1KI': 124.96477802688223, 'W_1KI': 8.55172175121277, 'W_D': 13.914440180113719, 'J_D': 203.32922177100187, 'W_D_1KI': 4.102134487061827, 'J_D_1KI': 1.2093556860441705} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json index 091be62..3f36d4c 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 27.268765687942505, "TIME_S_1KI": 27.268765687942505, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1234.21608165741, "W": 36.82999610504039, "J_1KI": 1234.21608165741, "W_1KI": 36.82999610504039, "W_D": 18.278996105040388, "J_D": 612.5504571070677, "W_D_1KI": 18.278996105040388, "J_D_1KI": 18.278996105040388} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 29.441463470458984, "TIME_S_1KI": 29.441463470458984, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1067.5749865341186, "W": 32.87033016720936, "J_1KI": 1067.5749865341186, "W_1KI": 32.87033016720936, "W_D": 17.51733016720936, "J_D": 568.9344591989515, "W_D_1KI": 17.51733016720936, "J_D_1KI": 17.51733016720936} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output index 9980b68..c095d19 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 27.268765687942505} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 29.441463470458984} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 46, 102, ..., 2499892, - 2499945, 2500000]), - col_indices=tensor([ 987, 1836, 5791, ..., 47187, 47558, 49789]), - values=tensor([0.1085, 0.8855, 0.3536, ..., 0.4174, 0.4340, 0.6085]), +tensor(crow_indices=tensor([ 0, 37, 79, ..., 2499907, + 2499951, 2500000]), + col_indices=tensor([ 2466, 3763, 4276, ..., 47502, 48879, 49149]), + values=tensor([0.0148, 0.9908, 0.2997, ..., 0.9281, 0.7443, 0.4383]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6397, 0.2759, 0.9232, ..., 0.5725, 0.2810, 0.6127]) +tensor([0.6700, 0.5614, 0.5608, ..., 0.8928, 0.8615, 0.5607]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,16 +16,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 27.268765687942505 seconds +Time: 29.441463470458984 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 46, 102, ..., 2499892, - 2499945, 2500000]), - col_indices=tensor([ 987, 1836, 5791, ..., 47187, 47558, 49789]), - values=tensor([0.1085, 0.8855, 0.3536, ..., 0.4174, 0.4340, 0.6085]), +tensor(crow_indices=tensor([ 0, 37, 79, ..., 2499907, + 2499951, 2500000]), + col_indices=tensor([ 2466, 3763, 4276, ..., 47502, 48879, 49149]), + values=tensor([0.0148, 0.9908, 0.2997, ..., 0.9281, 0.7443, 0.4383]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6397, 0.2759, 0.9232, ..., 0.5725, 0.2810, 0.6127]) +tensor([0.6700, 0.5614, 0.5608, ..., 0.8928, 0.8615, 0.5607]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -33,13 +33,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 27.268765687942505 seconds +Time: 29.441463470458984 seconds -[20.68, 20.56, 20.48, 20.12, 20.08, 20.28, 20.44, 20.68, 20.96, 20.96] -[20.84, 20.76, 20.6, 24.92, 26.28, 30.48, 35.16, 37.64, 40.0, 42.72, 43.28, 43.52, 43.36, 43.36, 43.52, 42.92, 43.08, 42.76, 42.76, 42.52, 42.68, 42.8, 42.88, 43.04, 43.16, 42.96, 42.88, 42.76, 42.52, 42.72, 42.64, 42.64] -33.511165142059326 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 27.268765687942505, 'TIME_S_1KI': 27.268765687942505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1234.21608165741, 'W': 36.82999610504039} -[20.68, 20.56, 20.48, 20.12, 20.08, 20.28, 20.44, 20.68, 20.96, 20.96, 20.44, 20.76, 20.8, 20.8, 20.72, 20.84, 20.84, 20.56, 20.68, 20.76] -371.02 -18.551 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 27.268765687942505, 'TIME_S_1KI': 27.268765687942505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1234.21608165741, 'W': 36.82999610504039, 'J_1KI': 1234.21608165741, 'W_1KI': 36.82999610504039, 'W_D': 18.278996105040388, 'J_D': 612.5504571070677, 'W_D_1KI': 18.278996105040388, 
'J_D_1KI': 18.278996105040388} +[16.92, 17.36, 17.08, 16.84, 17.2, 17.28, 17.4, 17.44, 17.36, 17.36] +[17.08, 17.24, 17.08, 21.52, 22.36, 25.24, 29.48, 32.12, 34.64, 38.4, 38.76, 38.64, 38.8, 39.0, 39.0, 39.44, 39.4, 39.28, 39.32, 39.32, 39.24, 39.12, 39.0, 39.08, 39.32, 39.36, 39.28, 39.28, 39.16, 38.92, 39.08] +32.47837734222412 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 29.441463470458984, 'TIME_S_1KI': 29.441463470458984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1067.5749865341186, 'W': 32.87033016720936} +[16.92, 17.36, 17.08, 16.84, 17.2, 17.28, 17.4, 17.44, 17.36, 17.36, 16.68, 16.88, 16.76, 16.92, 16.84, 17.04, 17.0, 17.0, 16.68, 17.0] +307.06000000000006 +15.353000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 29.441463470458984, 'TIME_S_1KI': 29.441463470458984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1067.5749865341186, 'W': 32.87033016720936, 'J_1KI': 1067.5749865341186, 'W_1KI': 32.87033016720936, 'W_D': 17.51733016720936, 'J_D': 568.9344591989515, 'W_D_1KI': 17.51733016720936, 'J_D_1KI': 17.51733016720936} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json index 02be475..719c8cd 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 19539, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.163734436035156, "TIME_S_1KI": 0.5201767969719615, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 495.2794138240814, "W": 33.936722825817846, "J_1KI": 25.348247803064712, "W_1KI": 1.7368710182618274, "W_D": 13.302722825817849, "J_D": 194.14263413858416, "W_D_1KI": 0.6808292556332386, "J_D_1KI": 0.03484463153862729} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 20098, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.650188207626343, "TIME_S_1KI": 0.5299128374776765, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 402.8303679275513, "W": 27.52967890413959, "J_1KI": 20.04330619601708, "W_1KI": 1.3697720621026763, "W_D": 12.350678904139592, "J_D": 180.72235947370535, "W_D_1KI": 0.6145227835674988, "J_D_1KI": 0.030576315233729664} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output index 4c99396..b0c7cd1 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 
spmv.py synthetic csr 1000 -ss 50000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5373842716217041} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.648245096206665} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 25000, 25000]), - col_indices=tensor([13933, 723, 18387, ..., 22194, 38514, 2158]), - values=tensor([0.9124, 0.6353, 0.3193, ..., 0.0372, 0.2371, 0.8076]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24997, 24999, 25000]), + col_indices=tensor([ 889, 16856, 49649, ..., 20622, 24354, 47394]), + values=tensor([0.8512, 0.0995, 0.9072, ..., 0.9114, 0.3857, 0.4483]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6534, 0.7497, 0.2436, ..., 0.0965, 0.5741, 0.5754]) +tensor([0.8531, 0.5584, 0.8209, ..., 0.8853, 0.7506, 0.6837]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.5373842716217041 seconds +Time: 0.648245096206665 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 19539 -ss 50000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.163734436035156} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 16197 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.461615800857544} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24996, 24998, 25000]), - col_indices=tensor([44723, 48345, 32100, ..., 22467, 28064, 29572]), - values=tensor([0.7283, 0.2640, 0.9583, ..., 0.2460, 0.4237, 0.5300]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([37259, 33129, 13575, ..., 31298, 24333, 9136]), + values=tensor([0.0302, 0.8728, 0.1875, ..., 0.5590, 0.6136, 0.6206]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7754, 0.9311, 0.4703, ..., 0.3816, 0.8788, 0.3934]) +tensor([0.6191, 0.3887, 0.4199, ..., 0.2754, 0.8424, 0.8817]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,15 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.163734436035156 seconds +Time: 8.461615800857544 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 20098 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.650188207626343} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24996, 24998, 25000]), - col_indices=tensor([44723, 48345, 32100, ..., 22467, 28064, 29572]), - values=tensor([0.7283, 0.2640, 0.9583, ..., 0.2460, 0.4237, 0.5300]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24999, 25000, 25000]), + col_indices=tensor([ 35, 8013, 35741, ..., 26171, 43365, 6398]), + values=tensor([0.7135, 0.5997, 0.1893, ..., 0.8752, 0.2236, 0.9882]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7754, 0.9311, 0.4703, ..., 0.3816, 0.8788, 0.3934]) +tensor([0.7523, 0.4685, 0.7648, ..., 0.0829, 0.9708, 0.7467]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -50,13 +53,29 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.163734436035156 seconds +Time: 10.650188207626343 seconds -[25.08, 25.2, 25.2, 25.4, 25.08, 25.4, 25.12, 25.0, 25.28, 25.36] -[25.56, 25.6, 26.0, 26.32, 28.76, 32.48, 32.48, 37.24, 41.24, 45.24, 45.88, 45.72, 45.6, 45.64] -14.594202756881714 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19539, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.163734436035156, 'TIME_S_1KI': 0.5201767969719615, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 495.2794138240814, 'W': 33.936722825817846} -[25.08, 25.2, 25.2, 25.4, 25.08, 25.4, 25.12, 25.0, 25.28, 25.36, 20.68, 20.68, 20.6, 20.6, 20.52, 20.72, 20.52, 20.72, 20.72, 20.72] -412.67999999999995 -20.633999999999997 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19539, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.163734436035156, 
'TIME_S_1KI': 0.5201767969719615, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 495.2794138240814, 'W': 33.936722825817846, 'J_1KI': 25.348247803064712, 'W_1KI': 1.7368710182618274, 'W_D': 13.302722825817849, 'J_D': 194.14263413858416, 'W_D_1KI': 0.6808292556332386, 'J_D_1KI': 0.03484463153862729} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24999, 25000, 25000]), + col_indices=tensor([ 35, 8013, 35741, ..., 26171, 43365, 6398]), + values=tensor([0.7135, 0.5997, 0.1893, ..., 0.8752, 0.2236, 0.9882]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7523, 0.4685, 0.7648, ..., 0.0829, 0.9708, 0.7467]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.650188207626343 seconds + +[16.64, 16.56, 16.56, 16.96, 17.08, 17.16, 16.88, 16.96, 16.48, 16.36] +[16.44, 16.44, 16.68, 18.12, 19.04, 22.72, 28.64, 33.16, 36.92, 39.76, 39.32, 39.28, 39.68, 39.6] +14.632585048675537 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 20098, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.650188207626343, 'TIME_S_1KI': 0.5299128374776765, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 402.8303679275513, 'W': 27.52967890413959} +[16.64, 16.56, 16.56, 16.96, 17.08, 17.16, 16.88, 16.96, 16.48, 16.36, 17.12, 16.92, 16.68, 16.88, 16.76, 16.8, 16.8, 17.12, 17.32, 17.2] +303.58 +15.178999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 20098, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.650188207626343, 'TIME_S_1KI': 0.5299128374776765, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 402.8303679275513, 'W': 27.52967890413959, 'J_1KI': 20.04330619601708, 'W_1KI': 1.3697720621026763, 'W_D': 12.350678904139592, 'J_D': 180.72235947370535, 'W_D_1KI': 0.6145227835674988, 'J_D_1KI': 0.030576315233729664} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..c3c7f57 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 96690, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.663233041763306, "TIME_S_1KI": 0.11028268736956569, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 262.53495136260995, "W": 18.48739374467239, "J_1KI": 2.7152234084456506, "W_1KI": 0.19120274841940624, "W_D": 3.6973937446723912, "J_D": 52.505783147812, "W_D_1KI": 0.038239670541652615, "J_D_1KI": 0.0003954873362462779} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.output 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..61cae4f --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.11520600318908691} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2498, 2498, 2500]), + col_indices=tensor([4712, 1560, 1507, ..., 2651, 244, 3781]), + values=tensor([0.1646, 0.3564, 0.3355, ..., 0.5785, 0.6935, 0.4198]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.6842, 0.2217, 0.0992, ..., 0.1824, 0.3701, 0.4149]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.11520600318908691 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 91141 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.897401094436646} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([1451, 2006, 3586, ..., 3975, 4446, 2086]), + values=tensor([0.6609, 0.8356, 0.1353, ..., 0.7408, 0.3224, 0.8471]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2892, 0.1223, 0.3419, ..., 0.7884, 0.7802, 0.0113]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 9.897401094436646 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 96690 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.663233041763306} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2499, 2500]), + col_indices=tensor([2205, 2444, 2425, ..., 3761, 2544, 2990]), + values=tensor([0.2656, 0.9114, 0.3983, ..., 0.8675, 0.7517, 0.7885]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.1659, 0.6941, 0.7553, ..., 0.7483, 0.8019, 0.7277]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.663233041763306 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2499, 2500]), + col_indices=tensor([2205, 2444, 2425, ..., 3761, 2544, 2990]), + values=tensor([0.2656, 0.9114, 0.3983, ..., 0.8675, 0.7517, 0.7885]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.1659, 0.6941, 0.7553, ..., 0.7483, 0.8019, 0.7277]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.663233041763306 seconds + +[16.32, 16.04, 16.32, 16.56, 16.56, 16.64, 16.96, 16.96, 16.76, 16.76] +[16.76, 16.88, 17.08, 18.96, 20.48, 21.96, 22.64, 22.36, 21.0, 19.8, 19.8, 19.6, 19.72, 19.8] +14.20075511932373 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 96690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.663233041763306, 'TIME_S_1KI': 0.11028268736956569, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 262.53495136260995, 'W': 18.48739374467239} +[16.32, 16.04, 16.32, 16.56, 16.56, 16.64, 16.96, 16.96, 16.76, 16.76, 16.48, 16.4, 16.2, 16.28, 16.28, 16.12, 16.12, 16.28, 16.36, 16.36] +295.79999999999995 +14.789999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 96690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.663233041763306, 'TIME_S_1KI': 0.11028268736956569, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 262.53495136260995, 'W': 18.48739374467239, 'J_1KI': 2.7152234084456506, 'W_1KI': 0.19120274841940624, 'W_D': 3.6973937446723912, 'J_D': 52.505783147812, 'W_D_1KI': 0.038239670541652615, 'J_D_1KI': 0.0003954873362462779} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..810fc32 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 17852, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, 
"MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.54783010482788, "TIME_S_1KI": 0.5908486502816425, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 261.2027643966674, "W": 18.408959018952206, "J_1KI": 14.6315686979984, "W_1KI": 1.0311986902841253, "W_D": 3.4579590189522076, "J_D": 49.0646132674217, "W_D_1KI": 0.19370149109075777, "J_D_1KI": 0.010850408418707023} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..5d5c1f0 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.6220724582672119} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 4, ..., 24988, 24993, 25000]), + col_indices=tensor([2208, 3192, 3630, ..., 2657, 2751, 4682]), + values=tensor([0.3516, 0.9043, 0.4344, ..., 0.9354, 0.2858, 0.8708]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1847, 0.5253, 0.6086, ..., 0.9552, 0.0514, 0.1920]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.6220724582672119 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 16879 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.927400827407837} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 17, ..., 24988, 24992, 25000]), + col_indices=tensor([1765, 1880, 2380, ..., 3402, 4335, 4928]), + values=tensor([0.8113, 0.6065, 0.0419, ..., 0.8515, 0.2786, 0.9879]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6729, 0.2847, 0.7618, ..., 0.5837, 0.8359, 0.7138]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.927400827407837 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17852 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.54783010482788} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 24988, 24991, 25000]), + col_indices=tensor([ 732, 2237, 2424, ..., 1459, 1662, 4133]), + values=tensor([0.5131, 0.9715, 0.7721, ..., 0.9714, 0.5730, 0.7149]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7581, 0.5458, 0.9932, ..., 0.3205, 0.5744, 0.9847]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.54783010482788 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 24988, 24991, 25000]), + col_indices=tensor([ 732, 2237, 2424, ..., 1459, 1662, 4133]), + values=tensor([0.5131, 0.9715, 0.7721, ..., 0.9714, 0.5730, 0.7149]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7581, 0.5458, 0.9932, ..., 0.3205, 0.5744, 0.9847]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.54783010482788 seconds + +[16.36, 16.36, 16.32, 16.6, 16.8, 17.04, 17.2, 17.08, 16.8, 16.52] +[16.48, 16.4, 17.36, 19.52, 19.52, 21.32, 21.84, 22.56, 21.0, 20.28, 19.68, 19.84, 19.92, 19.92] +14.188893795013428 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17852, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.54783010482788, 'TIME_S_1KI': 0.5908486502816425, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.2027643966674, 'W': 18.408959018952206} +[16.36, 16.36, 16.32, 16.6, 16.8, 17.04, 17.2, 17.08, 16.8, 16.52, 16.4, 16.36, 16.32, 16.32, 16.76, 16.96, 16.72, 16.44, 16.2, 16.2] +299.02 +14.950999999999999 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17852, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.54783010482788, 'TIME_S_1KI': 0.5908486502816425, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.2027643966674, 'W': 18.408959018952206, 'J_1KI': 14.6315686979984, 'W_1KI': 1.0311986902841253, 'W_D': 3.4579590189522076, 'J_D': 49.0646132674217, 'W_D_1KI': 0.19370149109075777, 'J_D_1KI': 0.010850408418707023} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..cca3281 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1933, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.477078676223755, "TIME_S_1KI": 5.420113127896407, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 268.2967868804932, "W": 18.818848497168876, "J_1KI": 138.79813082281075, "W_1KI": 9.7355656995183, "W_D": 3.947848497168877, "J_D": 56.283734206199696, "W_D_1KI": 2.0423427300408057, "J_D_1KI": 1.0565663373206444} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..3224ea8 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.431562900543213} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 36, 79, ..., 249900, 249941, + 250000]), + col_indices=tensor([ 80, 388, 404, ..., 4737, 4807, 4857]), + values=tensor([0.4885, 0.5213, 0.1721, ..., 0.5810, 0.1625, 0.7107]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.1545, 0.4718, 0.9539, ..., 0.2261, 0.6017, 0.7355]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 5.431562900543213 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1933 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.477078676223755} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 40, 89, ..., 249917, 249964, + 250000]), + col_indices=tensor([ 165, 177, 195, ..., 4656, 4719, 4927]), + values=tensor([0.2100, 0.9405, 0.2582, ..., 0.7931, 0.5258, 0.8197]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9173, 0.2185, 0.4076, ..., 0.3362, 0.1795, 0.2923]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.477078676223755 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 40, 89, ..., 249917, 249964, + 250000]), + col_indices=tensor([ 165, 177, 195, ..., 4656, 4719, 4927]), + values=tensor([0.2100, 0.9405, 0.2582, ..., 0.7931, 0.5258, 0.8197]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9173, 0.2185, 0.4076, ..., 0.3362, 0.1795, 0.2923]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.477078676223755 seconds + +[17.04, 17.04, 16.96, 17.04, 16.64, 16.4, 16.44, 16.32, 16.24, 16.36] +[16.44, 16.52, 17.76, 19.52, 21.52, 21.52, 22.36, 22.76, 21.68, 21.36, 19.84, 20.04, 20.0, 20.08] +14.25681209564209 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1933, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.477078676223755, 'TIME_S_1KI': 5.420113127896407, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 268.2967868804932, 'W': 18.818848497168876} +[17.04, 17.04, 16.96, 17.04, 16.64, 16.4, 16.44, 16.32, 16.24, 16.36, 16.4, 16.24, 16.32, 16.36, 16.36, 16.48, 16.68, 16.56, 16.4, 16.08] +297.41999999999996 +14.870999999999999 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1933, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.477078676223755, 'TIME_S_1KI': 5.420113127896407, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 268.2967868804932, 'W': 18.818848497168876, 'J_1KI': 138.79813082281075, 'W_1KI': 9.7355656995183, 'W_D': 3.947848497168877, 'J_D': 56.283734206199696, 'W_D_1KI': 2.0423427300408057, 'J_D_1KI': 1.0565663373206444} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..04d5dbf --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 26.625333547592163, "TIME_S_1KI": 26.625333547592163, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 615.7839365768435, "W": 20.26185984115326, "J_1KI": 615.7839365768435, "W_1KI": 20.26185984115326, "W_D": 5.26185984115326, "J_D": 159.91467674255395, "W_D_1KI": 5.26185984115326, "J_D_1KI": 5.26185984115326} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..b962b23 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 26.625333547592163} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 251, 521, ..., 1249514, + 1249753, 1250000]), + col_indices=tensor([ 14, 21, 29, ..., 4968, 4983, 4999]), + values=tensor([0.5630, 0.8243, 0.2167, ..., 0.8539, 0.0380, 0.9608]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.8150, 0.5277, 0.3367, ..., 0.0434, 0.1834, 0.0206]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 26.625333547592163 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 251, 521, ..., 1249514, + 1249753, 1250000]), + col_indices=tensor([ 14, 21, 29, ..., 4968, 4983, 4999]), + values=tensor([0.5630, 0.8243, 0.2167, ..., 0.8539, 0.0380, 0.9608]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.8150, 0.5277, 0.3367, ..., 0.0434, 0.1834, 0.0206]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 26.625333547592163 seconds + +[16.8, 16.84, 16.76, 16.6, 16.6, 16.68, 17.2, 17.2, 17.04, 17.0] +[16.84, 16.4, 16.52, 21.4, 23.24, 25.84, 26.88, 26.88, 24.16, 22.8, 20.48, 20.64, 20.68, 20.72, 20.68, 20.44, 20.28, 20.32, 20.28, 20.28, 20.12, 20.12, 20.08, 20.12, 19.96, 20.12, 20.04, 19.92, 19.96, 20.2] +30.391283988952637 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 26.625333547592163, 'TIME_S_1KI': 26.625333547592163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 615.7839365768435, 'W': 20.26185984115326} +[16.8, 16.84, 16.76, 16.6, 16.6, 16.68, 17.2, 17.2, 17.04, 17.0, 16.4, 16.32, 16.36, 16.36, 16.56, 16.52, 16.56, 16.68, 16.44, 16.36] +300.0 +15.0 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 26.625333547592163, 'TIME_S_1KI': 26.625333547592163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 615.7839365768435, 'W': 20.26185984115326, 'J_1KI': 615.7839365768435, 'W_1KI': 20.26185984115326, 'W_D': 5.26185984115326, 'J_D': 159.91467674255395, 'W_D_1KI': 5.26185984115326, 'J_D_1KI': 5.26185984115326} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..3a570df --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 
1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 53.914990186691284, "TIME_S_1KI": 53.914990186691284, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1144.4719800853732, "W": 20.188888585576834, "J_1KI": 1144.4719800853732, "W_1KI": 20.188888585576834, "W_D": 5.141888585576837, "J_D": 291.48446611952824, "W_D_1KI": 5.141888585576837, "J_D_1KI": 5.141888585576837} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..aab9423 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 53.914990186691284} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 478, 982, ..., 2498978, + 2499487, 2500000]), + col_indices=tensor([ 3, 18, 24, ..., 4984, 4986, 4997]), + values=tensor([0.0150, 0.0039, 0.1247, ..., 0.8538, 0.3013, 0.1357]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4451, 0.4302, 0.3190, ..., 0.9031, 0.3775, 0.0047]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 53.914990186691284 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 478, 982, ..., 2498978, + 2499487, 2500000]), + col_indices=tensor([ 3, 18, 24, ..., 4984, 4986, 4997]), + values=tensor([0.0150, 0.0039, 0.1247, ..., 0.8538, 0.3013, 0.1357]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4451, 0.4302, 0.3190, ..., 0.9031, 0.3775, 0.0047]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 53.914990186691284 seconds + +[16.52, 16.4, 16.4, 16.56, 16.36, 16.52, 16.8, 16.56, 16.56, 16.76] +[16.48, 16.28, 19.4, 20.36, 22.04, 22.04, 24.88, 25.48, 22.92, 22.96, 20.08, 20.12, 20.12, 20.04, 19.92, 20.0, 20.24, 20.32, 20.32, 20.4, 20.4, 20.04, 20.08, 20.4, 20.48, 20.68, 20.96, 20.6, 20.48, 20.44, 20.44, 20.12, 20.08, 20.2, 19.96, 20.0, 20.32, 20.44, 20.44, 20.48, 20.64, 20.44, 20.44, 20.56, 20.76, 20.64, 20.8, 20.6, 20.64, 20.44, 20.4, 20.08, 20.08, 20.2, 20.36, 20.36] +56.68821120262146 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 53.914990186691284, 'TIME_S_1KI': 53.914990186691284, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1144.4719800853732, 'W': 20.188888585576834} +[16.52, 16.4, 16.4, 16.56, 16.36, 16.52, 16.8, 16.56, 16.56, 16.76, 16.72, 16.68, 16.8, 16.84, 16.84, 16.84, 17.0, 17.2, 17.0, 17.16] +300.93999999999994 +15.046999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 53.914990186691284, 'TIME_S_1KI': 53.914990186691284, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1144.4719800853732, 'W': 20.188888585576834, 'J_1KI': 1144.4719800853732, 'W_1KI': 20.188888585576834, 'W_D': 5.141888585576837, 'J_D': 291.48446611952824, 'W_D_1KI': 5.141888585576837, 'J_D_1KI': 5.141888585576837} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..aba0d5e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 293134, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.89356017112732, "TIME_S_1KI": 0.03716239048055606, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 278.4485914421082, "W": 19.57952781791354, "J_1KI": 0.9499020633638819, "W_1KI": 0.06679377969772711, "W_D": 4.613527817913537, "J_D": 65.610893910408, "W_D_1KI": 0.01573863085794735, "J_D_1KI": 5.3690908792386244e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..ebedd90 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,437 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic 
csr 1000 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04628562927246094} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 683, 1119, 1321, 2450, 3482, 3631, 1761, 3022, 756, + 3517, 37, 3468, 4655, 1287, 913, 1692, 3561, 1823, + 1971, 4332, 175, 242, 3518, 2634, 2163, 1929, 1347, + 4194, 4673, 3242, 1554, 3336, 2363, 4819, 1624, 2276, + 4446, 1440, 2278, 2820, 2808, 2194, 1293, 3294, 2532, + 2630, 3533, 2517, 2068, 4492, 3196, 31, 2012, 3028, + 3263, 1298, 1827, 4518, 2739, 383, 2502, 2163, 2983, + 275, 3460, 724, 4585, 3927, 4513, 2645, 242, 1435, + 3115, 1351, 1335, 3004, 3671, 2087, 2361, 3470, 3033, + 1776, 2762, 2985, 544, 2787, 1009, 4955, 757, 4621, + 3559, 4933, 3451, 2535, 2363, 1115, 250, 284, 3453, + 4194, 4788, 4427, 434, 2792, 219, 1976, 286, 1619, + 3123, 2185, 100, 1443, 2614, 3193, 4750, 1625, 61, + 2975, 2813, 3271, 969, 1209, 2770, 2904, 1769, 343, + 239, 3167, 403, 2400, 1507, 4176, 1210, 627, 332, + 3526, 2019, 4707, 4667, 3689, 1411, 474, 2037, 1559, + 3233, 2371, 3442, 4237, 1757, 4685, 2495, 737, 562, + 4385, 4537, 1150, 2708, 4099, 4510, 4059, 58, 3153, + 2292, 1450, 3200, 4511, 1556, 237, 2082, 3442, 4661, + 3624, 407, 1680, 104, 2285, 3192, 1818, 2013, 2874, + 4274, 1703, 393, 4638, 3642, 1595, 4200, 2976, 747, + 1685, 436, 4175, 3319, 2858, 4687, 1967, 1550, 4498, + 5, 3295, 2892, 3076, 2947, 1470, 2928, 4594, 372, + 1505, 3795, 2014, 3988, 420, 2057, 4772, 3022, 3131, + 376, 1473, 4703, 771, 759, 172, 3505, 2361, 168, + 3559, 881, 3500, 894, 4238, 842, 291, 2606, 4128, + 2513, 4919, 1689, 1039, 4346, 4963, 184, 2438, 3794, + 631, 3050, 4745, 3174, 1910, 3181, 4415]), + values=tensor([0.5133, 0.9500, 0.6089, 0.1299, 0.0389, 0.7021, 0.0545, + 0.1504, 0.2775, 0.3654, 0.5414, 0.7066, 0.5062, 0.9276, + 0.5403, 0.1473, 0.0619, 0.8013, 0.5229, 0.9618, 0.3595, + 0.9768, 0.4894, 0.9436, 0.2586, 0.3228, 0.7550, 0.4654, + 0.5557, 0.6099, 0.1466, 0.3234, 0.9559, 0.4861, 0.6590, + 0.2645, 0.3128, 0.2881, 0.8916, 0.9625, 0.3287, 0.6208, + 0.1989, 0.4749, 0.6654, 0.5023, 0.5464, 0.6484, 0.8692, + 0.5946, 0.3095, 0.4520, 0.2934, 0.1142, 0.3825, 0.0692, + 0.4451, 0.9095, 0.2024, 0.8392, 0.4692, 0.1054, 0.2753, + 0.1688, 0.2684, 0.5848, 0.9464, 0.6200, 0.5357, 0.5307, + 0.7002, 0.6351, 0.9452, 0.4196, 0.3107, 0.9700, 0.4879, + 0.0926, 0.0442, 0.1064, 0.9432, 0.8436, 0.3680, 0.1497, + 0.1266, 0.6045, 0.6916, 0.0824, 0.1706, 0.8211, 0.8262, + 0.7835, 0.0310, 0.3323, 0.1890, 0.5250, 0.8324, 0.5975, + 0.0174, 0.0556, 0.9553, 0.6279, 0.3153, 0.4085, 0.9318, + 0.3588, 0.1032, 0.7200, 0.2145, 0.8631, 0.4178, 0.0372, + 0.7636, 0.4317, 0.2105, 0.2684, 0.0231, 0.6996, 0.0880, + 0.2381, 0.6281, 0.3203, 0.4143, 0.7477, 0.1347, 0.5900, + 0.7586, 0.5291, 0.6348, 0.4495, 0.3601, 0.9398, 0.3999, + 0.2033, 0.1346, 0.0706, 0.9911, 0.9515, 0.0420, 0.6637, + 0.2691, 0.3435, 0.7224, 0.4624, 0.4390, 0.3084, 0.3677, + 0.2556, 0.8927, 0.7015, 0.4402, 0.6275, 0.9141, 0.3633, + 0.0870, 
0.2460, 0.1945, 0.8036, 0.3884, 0.5353, 0.6776, + 0.4646, 0.1680, 0.4783, 0.9893, 0.5596, 0.0460, 0.9167, + 0.8564, 0.2217, 0.2454, 0.6476, 0.0091, 0.6634, 0.6906, + 0.5109, 0.0619, 0.8391, 0.3721, 0.4015, 0.1086, 0.8568, + 0.0263, 0.0960, 0.2106, 0.8204, 0.3496, 0.0650, 0.0530, + 0.2300, 0.7920, 0.0833, 0.8839, 0.6947, 0.7490, 0.6930, + 0.4034, 0.9770, 0.5568, 0.5813, 0.4457, 0.4409, 0.3165, + 0.4290, 0.8018, 0.4890, 0.7248, 0.5066, 0.4197, 0.9251, + 0.4526, 0.8257, 0.6029, 0.9210, 0.8099, 0.1966, 0.6605, + 0.5583, 0.0851, 0.2553, 0.8703, 0.6237, 0.8267, 0.9769, + 0.6623, 0.5390, 0.0172, 0.1684, 0.4788, 0.5289, 0.0477, + 0.8018, 0.0914, 0.3275, 0.7127, 0.1031, 0.8096, 0.1163, + 0.3143, 0.8185, 0.2797, 0.8908, 0.1307, 0.5822, 0.2044, + 0.6227, 0.4853, 0.6034, 0.6732, 0.0321]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.7002, 0.3467, 0.9676, ..., 0.8135, 0.6463, 0.9360]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.04628562927246094 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 226852 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.58342981338501} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 250, 250, 250]), + col_indices=tensor([2568, 3647, 442, 965, 263, 2383, 651, 4423, 3036, + 1922, 4223, 1539, 1097, 1063, 4112, 2393, 2048, 391, + 3893, 324, 2099, 2458, 2688, 682, 96, 2079, 4917, + 1561, 2320, 1455, 2135, 3126, 2991, 4240, 1021, 4993, + 3258, 3975, 1172, 1489, 4782, 364, 2199, 94, 1257, + 4686, 607, 1510, 89, 4888, 4165, 3842, 3018, 4662, + 3670, 3231, 4131, 746, 49, 680, 3901, 1594, 359, + 3311, 2321, 3005, 4317, 2855, 829, 3097, 2418, 1365, + 3858, 1930, 3446, 1588, 4464, 2454, 3676, 2837, 1569, + 2885, 1556, 3076, 4363, 2721, 3030, 172, 2121, 2698, + 3156, 442, 947, 2541, 828, 1038, 4897, 3795, 2214, + 609, 3658, 77, 3238, 2356, 89, 2253, 2806, 2065, + 2259, 579, 2660, 1688, 2237, 2605, 1390, 4025, 2509, + 2831, 635, 2338, 2347, 3405, 393, 82, 2030, 4203, + 4365, 3211, 1439, 3151, 1397, 476, 3123, 1758, 2491, + 252, 1078, 102, 4624, 527, 163, 2201, 1415, 53, + 3597, 2281, 1819, 1693, 3944, 4697, 560, 1457, 1677, + 2072, 2996, 1150, 4324, 2498, 4491, 2244, 3104, 2934, + 632, 2182, 4187, 1162, 422, 1444, 3294, 1160, 1691, + 2846, 266, 3519, 3656, 2923, 1457, 1651, 1147, 1014, + 671, 3331, 4535, 2766, 1343, 680, 3907, 3255, 700, + 2823, 4644, 4966, 3493, 4426, 2084, 2312, 2388, 1167, + 2294, 2501, 1866, 3421, 4059, 858, 4657, 794, 658, + 2225, 1411, 1995, 2476, 795, 2719, 1100, 685, 3038, + 1607, 4350, 2782, 410, 2489, 2516, 1183, 2789, 4067, + 4708, 2699, 3392, 4757, 4834, 2136, 1271, 2790, 3056, + 2835, 3630, 2085, 603, 3829, 4234, 710, 378, 2071, + 1558, 4206, 4361, 1063, 3780, 352, 168]), + values=tensor([6.8562e-01, 9.9314e-01, 3.4074e-01, 1.7233e-01, + 1.4522e-02, 6.3720e-01, 5.5464e-02, 7.3826e-01, + 1.5940e-01, 1.2632e-01, 2.2414e-01, 7.6966e-01, + 6.9475e-01, 9.2958e-01, 3.8229e-01, 7.5368e-01, + 7.6972e-01, 6.6374e-01, 5.6166e-01, 6.7113e-01, + 2.6640e-01, 3.1404e-01, 8.1747e-01, 7.0390e-01, + 3.3211e-02, 4.2381e-01, 1.8457e-01, 3.9280e-01, + 7.9738e-01, 4.8542e-01, 5.6000e-01, 2.0755e-01, + 7.0598e-01, 8.6707e-01, 1.7337e-01, 7.0748e-01, + 9.7389e-01, 7.9562e-01, 6.7701e-01, 4.6490e-01, + 5.4665e-01, 4.9560e-02, 5.8946e-01, 3.8658e-01, + 3.0672e-01, 2.5947e-01, 8.6455e-01, 8.5056e-02, + 3.3869e-01, 3.9093e-01, 5.9721e-01, 6.2207e-01, + 8.8265e-01, 8.1640e-01, 1.7680e-01, 2.4072e-01, + 3.6980e-01, 2.2490e-01, 6.0225e-01, 7.0554e-01, + 8.5790e-01, 7.4936e-01, 1.7010e-01, 2.0063e-01, + 1.1246e-01, 6.8727e-01, 6.8037e-01, 8.9757e-01, + 3.8505e-01, 6.5721e-01, 9.3013e-01, 4.9507e-01, + 7.9582e-01, 3.6413e-01, 6.2028e-01, 2.8858e-01, + 2.8115e-01, 4.5974e-01, 9.8822e-01, 1.1635e-01, + 5.8307e-01, 5.1420e-02, 1.1202e-01, 5.4531e-01, + 7.6023e-01, 9.0514e-01, 5.3398e-01, 1.7667e-01, + 9.2343e-01, 9.0805e-01, 9.6041e-01, 5.0364e-01, + 2.4720e-01, 1.5194e-01, 2.2205e-01, 3.0452e-01, + 6.8304e-02, 7.0941e-02, 2.3679e-01, 2.9428e-01, + 2.6988e-01, 2.9905e-01, 9.7067e-01, 3.9498e-01, + 4.5558e-01, 6.9955e-01, 5.3969e-02, 3.5860e-01, + 7.2397e-01, 7.1675e-01, 8.0095e-01, 4.8315e-01, + 4.1035e-01, 3.9824e-01, 5.0060e-01, 5.6947e-01, + 2.5338e-01, 1.2799e-01, 9.1108e-01, 7.6016e-02, + 8.5394e-01, 4.5257e-01, 4.8350e-01, 1.3291e-01, + 2.2106e-01, 8.0845e-01, 6.7657e-01, 4.4898e-01, + 6.6830e-01, 4.0859e-01, 8.4227e-01, 7.7311e-01, + 5.4753e-01, 3.9804e-01, 9.4899e-01, 8.2056e-01, + 7.7146e-01, 6.3508e-01, 6.2972e-01, 7.4169e-01, + 7.8963e-01, 1.0699e-01, 5.7796e-01, 7.2429e-01, + 6.3979e-02, 4.5238e-02, 6.3144e-01, 9.8512e-01, + 5.1816e-01, 3.2546e-01, 8.7580e-01, 9.7697e-01, + 
4.6167e-01, 2.4042e-01, 1.1377e-01, 9.7747e-01, + 7.4258e-01, 6.3887e-01, 7.3930e-01, 2.3402e-01, + 4.1461e-01, 4.8691e-01, 2.7849e-01, 5.9673e-01, + 8.6946e-02, 9.5615e-01, 9.7242e-01, 8.9092e-01, + 4.1164e-01, 3.3893e-01, 9.4485e-01, 3.2960e-01, + 7.1004e-01, 4.1240e-01, 1.1151e-01, 7.6114e-01, + 5.5779e-01, 9.3723e-01, 2.2015e-01, 9.0422e-01, + 2.5683e-01, 4.6041e-01, 3.3427e-01, 4.3355e-02, + 3.1777e-01, 6.8533e-01, 4.9880e-01, 7.0528e-01, + 6.2605e-01, 9.9580e-01, 3.8253e-02, 1.0464e-02, + 6.2010e-02, 4.9009e-02, 8.8508e-01, 8.3043e-01, + 9.5592e-01, 8.5708e-01, 5.1611e-01, 1.7460e-01, + 4.5394e-01, 4.2516e-01, 8.0836e-01, 5.2242e-01, + 8.0860e-01, 5.1184e-01, 7.3172e-01, 9.2625e-01, + 3.8652e-01, 8.6518e-01, 6.9408e-01, 2.5732e-01, + 6.0297e-01, 2.2091e-01, 1.2658e-02, 7.6721e-01, + 9.6888e-02, 6.6146e-01, 4.4139e-01, 1.9043e-01, + 1.1703e-04, 3.3229e-01, 3.7446e-01, 3.2871e-01, + 5.5144e-01, 4.6404e-01, 7.9360e-01, 3.2754e-01, + 9.8665e-01, 3.2413e-01, 4.6510e-01, 6.8652e-01, + 9.6619e-01, 4.0817e-01, 5.0618e-01, 8.0048e-01, + 3.4373e-01, 9.9556e-01, 1.4700e-01, 1.2820e-01, + 8.0477e-01, 2.3035e-01, 5.4135e-01, 3.8689e-01, + 1.8548e-01, 9.7019e-01, 2.2577e-01, 3.2056e-01, + 4.1451e-02, 1.3423e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.0595, 0.8939, 0.2592, ..., 0.5348, 0.8468, 0.6804]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 8.58342981338501 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 277505 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.940149784088135} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3014, 4957, 1583, 2867, 2783, 475, 2139, 2382, 3400, + 1371, 4277, 2356, 2363, 809, 3070, 166, 954, 1491, + 2451, 1189, 312, 609, 4247, 23, 459, 898, 2311, + 4831, 338, 2271, 1779, 2454, 4584, 3113, 487, 1534, + 2828, 4851, 633, 1451, 3532, 1285, 3207, 3942, 1871, + 1291, 465, 1879, 4867, 2362, 2141, 1675, 4085, 954, + 3823, 3407, 284, 572, 14, 2939, 2313, 3750, 1562, + 2613, 2778, 2860, 3224, 2726, 239, 3475, 2082, 2253, + 3516, 1146, 3276, 4995, 2558, 345, 3127, 2150, 75, + 826, 1135, 4736, 4690, 2556, 910, 1899, 2387, 947, + 695, 304, 2013, 2, 897, 3875, 3772, 1882, 451, + 3308, 1440, 3959, 2068, 783, 1822, 1945, 4659, 2440, + 4920, 2894, 4923, 1763, 2739, 4990, 4910, 2298, 1281, + 1642, 4403, 354, 879, 3935, 4111, 1373, 3061, 948, + 4840, 4778, 2992, 2315, 2233, 3168, 3973, 2138, 2299, + 4743, 1438, 4906, 254, 4427, 953, 1389, 2612, 1867, + 4913, 4975, 2438, 2961, 96, 3956, 1648, 4671, 3511, + 2332, 4616, 3807, 1099, 2689, 951, 1859, 3672, 4327, + 4946, 755, 3445, 807, 2050, 4470, 819, 3494, 4764, + 1487, 2681, 1451, 1828, 600, 2998, 2378, 1446, 2079, + 873, 270, 4942, 3757, 4929, 2560, 3562, 3539, 1466, + 871, 1762, 750, 1346, 533, 2678, 341, 1486, 2504, + 3221, 679, 2068, 2145, 3144, 834, 1808, 3153, 3407, + 2103, 1634, 1022, 1783, 3740, 3527, 3470, 3178, 4350, + 3648, 2120, 4578, 1596, 135, 2530, 1745, 608, 4825, + 4913, 4142, 3012, 1856, 2018, 3602, 264, 275, 4814, + 1938, 4047, 1223, 3103, 4868, 3533, 4726, 3018, 1931, + 379, 2338, 475, 3665, 4431, 938, 707]), + values=tensor([0.8419, 0.4424, 0.5698, 0.2999, 0.9295, 0.4679, 0.3442, + 0.3474, 0.7467, 0.0757, 0.0276, 0.8208, 0.7200, 0.1976, + 0.3319, 0.9583, 0.8463, 0.9566, 0.3073, 0.6760, 0.4346, + 0.2886, 0.9486, 0.0795, 0.8036, 0.5111, 0.4404, 0.5873, + 0.2286, 0.4238, 0.6160, 0.9372, 0.8314, 0.1765, 0.9714, + 0.5934, 0.0764, 0.5254, 0.7722, 0.8765, 0.7821, 0.7165, + 0.7425, 0.1690, 0.9418, 0.7089, 0.3090, 0.3146, 0.3776, + 0.3970, 0.7107, 0.4232, 0.2742, 0.1785, 0.3661, 0.7381, + 0.7677, 0.2922, 0.0118, 0.5142, 0.3228, 0.6287, 0.6950, + 0.5212, 0.9233, 0.5583, 0.3402, 0.9655, 0.1707, 0.5180, + 0.7601, 0.0519, 0.3853, 0.1663, 0.4842, 0.9445, 0.1159, + 0.1236, 0.2320, 0.4008, 0.3127, 0.0194, 0.2149, 0.2742, + 0.3828, 0.5264, 0.2515, 0.8214, 0.1769, 0.1933, 0.8188, + 0.5274, 0.2875, 0.2494, 0.8088, 0.9923, 0.5445, 0.3175, + 0.6285, 0.6236, 0.2042, 0.2625, 0.5051, 0.4802, 0.6055, + 0.2595, 0.3970, 0.4291, 0.2183, 0.7748, 0.7343, 0.0474, + 0.5801, 0.6534, 0.2948, 0.0363, 0.3237, 0.2880, 0.2211, + 0.1790, 0.3192, 0.9079, 0.1088, 0.8037, 0.5242, 0.3090, + 0.6078, 0.5167, 0.1361, 0.1093, 0.6079, 0.0095, 0.5118, + 0.3018, 0.1316, 0.6571, 0.0073, 0.3654, 0.4280, 0.8191, + 0.3184, 0.2360, 0.6869, 0.0155, 0.5085, 0.4025, 0.0799, + 0.7194, 0.4048, 0.5539, 0.2632, 0.0734, 0.9784, 0.3601, + 0.9418, 0.0499, 0.8840, 0.6116, 0.9865, 0.6081, 0.4861, + 0.7266, 0.7795, 0.1224, 0.6387, 0.9470, 0.4315, 0.0825, + 0.8006, 0.5528, 0.3202, 0.1662, 0.3257, 0.8268, 0.0860, + 0.4786, 0.2279, 0.0058, 0.4003, 0.0577, 0.1538, 0.9729, + 0.3529, 0.3205, 0.9176, 0.5843, 0.6548, 0.1570, 0.8380, + 0.7278, 0.2116, 0.1503, 0.0103, 0.8089, 0.9813, 0.7760, + 0.2123, 0.9690, 0.9240, 0.5892, 0.4778, 0.5100, 0.0404, + 0.6261, 0.6426, 0.9521, 0.0053, 0.5755, 0.0743, 0.7500, + 0.3281, 0.4225, 0.6900, 0.0916, 0.8990, 0.2711, 0.5755, + 0.5712, 0.7556, 0.0051, 0.6971, 0.0437, 0.5565, 0.4256, + 0.2960, 0.6043, 0.6836, 0.9303, 0.4472, 0.6016, 
0.6132, + 0.9503, 0.8339, 0.2697, 0.0658, 0.7983, 0.4874, 0.1771, + 0.9875, 0.2001, 0.2752, 0.5608, 0.4997, 0.6797, 0.1612, + 0.9007, 0.9904, 0.7264, 0.3981, 0.6661]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.5019, 0.1367, 0.6742, ..., 0.0249, 0.2703, 0.5698]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 9.940149784088135 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 293134 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.89356017112732} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3878, 1793, 196, 4523, 2590, 2367, 223, 4753, 811, + 1344, 3831, 3891, 1471, 342, 75, 2706, 3424, 2402, + 4777, 1498, 772, 4383, 2094, 2326, 2946, 1126, 2886, + 1923, 932, 4969, 3541, 896, 863, 2668, 4869, 4410, + 1937, 3764, 94, 3879, 1282, 4972, 2002, 1481, 4954, + 2173, 3425, 2770, 4498, 897, 827, 829, 4189, 4991, + 1934, 1179, 1128, 2114, 2395, 2017, 3561, 4691, 2694, + 1416, 4554, 620, 4382, 4973, 3582, 230, 4891, 3107, + 1979, 140, 933, 4466, 308, 3556, 3173, 4882, 4797, + 1238, 3925, 3354, 291, 1290, 414, 2445, 3042, 2325, + 2660, 1731, 777, 3782, 2858, 3541, 2112, 2412, 4004, + 942, 1750, 1450, 45, 960, 1184, 3190, 4107, 4960, + 4877, 4158, 1493, 3953, 2210, 1987, 3321, 3595, 1029, + 1281, 3168, 3718, 1747, 2899, 1694, 1765, 2658, 2297, + 2811, 4794, 2166, 3385, 3144, 590, 756, 1857, 2472, + 2864, 4092, 2483, 1698, 3039, 4797, 2893, 4815, 1160, + 92, 3486, 728, 546, 1233, 1206, 3098, 284, 3217, + 1908, 3538, 556, 1018, 3457, 4789, 3509, 986, 1353, + 3714, 674, 3739, 3917, 378, 4295, 1240, 678, 634, + 156, 182, 2959, 787, 2431, 894, 4155, 1278, 4710, + 115, 3800, 3528, 4651, 4055, 3457, 4797, 3790, 2898, + 2898, 1677, 2106, 3532, 1869, 3926, 1788, 4954, 1802, + 3662, 3116, 1672, 1899, 2743, 4402, 201, 2790, 4915, + 3309, 2448, 4340, 71, 1083, 3547, 1833, 4517, 4811, + 4522, 4837, 4905, 1773, 2748, 2712, 2488, 4842, 2297, + 377, 2695, 2905, 534, 1022, 2504, 1436, 3486, 3980, + 4913, 361, 4684, 2741, 722, 1718, 3274, 513, 1785, + 4555, 575, 662, 3842, 1584, 2198, 215]), + values=tensor([0.8076, 0.3370, 0.6780, 0.9299, 0.5410, 0.0897, 0.3343, + 0.8017, 0.5673, 0.5602, 0.9800, 0.8553, 0.3447, 0.6119, + 0.3490, 0.5758, 0.1388, 0.7568, 0.7228, 0.2456, 0.6799, + 0.7488, 0.1368, 0.7230, 0.3714, 0.8061, 0.2178, 0.6691, + 0.7090, 0.6240, 0.5615, 0.6385, 0.8034, 0.6963, 0.9896, + 0.1078, 0.2316, 0.3754, 0.7350, 0.4907, 0.3665, 0.2209, + 0.4611, 0.7569, 0.4815, 0.7270, 0.4688, 0.5127, 0.0439, + 0.4951, 0.3454, 0.1899, 0.8750, 0.1915, 0.8080, 0.6042, + 0.7305, 0.2510, 0.4960, 0.3143, 0.3207, 0.3323, 0.5478, + 0.3218, 0.1649, 0.9155, 0.2697, 0.4415, 0.6177, 0.1457, + 0.9256, 0.6524, 0.8106, 0.1943, 0.2636, 0.7375, 0.5837, + 0.4529, 0.4107, 0.4337, 
0.4074, 0.1673, 0.9988, 0.7338, + 0.1243, 0.9778, 0.4221, 0.2348, 0.5442, 0.1259, 0.4222, + 0.6127, 0.0857, 0.6974, 0.0596, 0.3553, 0.4614, 0.5799, + 0.4404, 0.3360, 0.3314, 0.9445, 0.7231, 0.9851, 0.8853, + 0.4987, 0.3871, 0.5069, 0.6349, 0.9384, 0.3450, 0.4613, + 0.2127, 0.4994, 0.0034, 0.9538, 0.3203, 0.8248, 0.5140, + 0.0568, 0.3913, 0.0456, 0.0790, 0.1457, 0.8710, 0.7025, + 0.5191, 0.7160, 0.2410, 0.7547, 0.7169, 0.9282, 0.0473, + 0.4454, 0.5093, 0.4795, 0.3417, 0.5014, 0.0605, 0.9341, + 0.8068, 0.8325, 0.0916, 0.8219, 0.9882, 0.3617, 0.8114, + 0.3412, 0.2133, 0.4138, 0.2870, 0.1987, 0.5576, 0.8136, + 0.4067, 0.6195, 0.5018, 0.5513, 0.5252, 0.0402, 0.7889, + 0.3122, 0.6215, 0.9385, 0.7669, 0.1080, 0.2818, 0.0494, + 0.0251, 0.3317, 0.4666, 0.5981, 0.5539, 0.1688, 0.7416, + 0.8841, 0.4123, 0.1102, 0.1371, 0.7232, 0.6598, 0.7427, + 0.8150, 0.1180, 0.3866, 0.1447, 0.4442, 0.5099, 0.1417, + 0.2917, 0.8599, 0.3553, 0.2307, 0.1388, 0.1482, 0.8529, + 0.3988, 0.9926, 0.3184, 0.2404, 0.4847, 0.5288, 0.0738, + 0.0517, 0.1797, 0.1796, 0.7215, 0.5955, 0.6432, 0.0017, + 0.6486, 0.6664, 0.4487, 0.7630, 0.7774, 0.9276, 0.9518, + 0.4507, 0.3399, 0.7495, 0.4581, 0.6140, 0.0659, 0.8137, + 0.4343, 0.4836, 0.5681, 0.7947, 0.1417, 0.8229, 0.0824, + 0.2070, 0.8783, 0.3511, 0.9580, 0.1053, 0.3375, 0.2396, + 0.0513, 0.5334, 0.3977, 0.6765, 0.1035, 0.8974, 0.8093, + 0.7238, 0.8002, 0.6243, 0.9654, 0.2803]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4617, 0.6014, 0.4133, ..., 0.3579, 0.3877, 0.5185]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.89356017112732 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3878, 1793, 196, 4523, 2590, 2367, 223, 4753, 811, + 1344, 3831, 3891, 1471, 342, 75, 2706, 3424, 2402, + 4777, 1498, 772, 4383, 2094, 2326, 2946, 1126, 2886, + 1923, 932, 4969, 3541, 896, 863, 2668, 4869, 4410, + 1937, 3764, 94, 3879, 1282, 4972, 2002, 1481, 4954, + 2173, 3425, 2770, 4498, 897, 827, 829, 4189, 4991, + 1934, 1179, 1128, 2114, 2395, 2017, 3561, 4691, 2694, + 1416, 4554, 620, 4382, 4973, 3582, 230, 4891, 3107, + 1979, 140, 933, 4466, 308, 3556, 3173, 4882, 4797, + 1238, 3925, 3354, 291, 1290, 414, 2445, 3042, 2325, + 2660, 1731, 777, 3782, 2858, 3541, 2112, 2412, 4004, + 942, 1750, 1450, 45, 960, 1184, 3190, 4107, 4960, + 4877, 4158, 1493, 3953, 2210, 1987, 3321, 3595, 1029, + 1281, 3168, 3718, 1747, 2899, 1694, 1765, 2658, 2297, + 2811, 4794, 2166, 3385, 3144, 590, 756, 1857, 2472, + 2864, 4092, 2483, 1698, 3039, 4797, 2893, 4815, 1160, + 92, 3486, 728, 546, 1233, 1206, 3098, 284, 3217, + 1908, 3538, 556, 1018, 3457, 4789, 3509, 986, 1353, + 3714, 674, 3739, 3917, 378, 4295, 1240, 678, 634, + 156, 182, 2959, 787, 2431, 894, 4155, 1278, 4710, + 115, 3800, 3528, 4651, 4055, 3457, 4797, 3790, 2898, + 2898, 1677, 2106, 3532, 1869, 3926, 1788, 4954, 1802, + 3662, 3116, 1672, 1899, 2743, 4402, 201, 2790, 4915, + 3309, 2448, 4340, 71, 1083, 3547, 1833, 4517, 4811, + 4522, 4837, 4905, 1773, 2748, 2712, 2488, 4842, 2297, + 377, 2695, 2905, 534, 1022, 2504, 1436, 3486, 3980, + 4913, 361, 4684, 2741, 722, 1718, 3274, 513, 1785, + 4555, 575, 662, 3842, 1584, 2198, 215]), + values=tensor([0.8076, 0.3370, 0.6780, 0.9299, 0.5410, 0.0897, 0.3343, + 0.8017, 0.5673, 0.5602, 0.9800, 0.8553, 0.3447, 0.6119, + 0.3490, 0.5758, 0.1388, 0.7568, 0.7228, 0.2456, 0.6799, + 0.7488, 0.1368, 0.7230, 0.3714, 0.8061, 0.2178, 0.6691, + 0.7090, 0.6240, 0.5615, 0.6385, 0.8034, 0.6963, 0.9896, + 0.1078, 0.2316, 0.3754, 0.7350, 0.4907, 0.3665, 0.2209, + 0.4611, 0.7569, 0.4815, 0.7270, 0.4688, 0.5127, 0.0439, + 0.4951, 0.3454, 0.1899, 0.8750, 0.1915, 0.8080, 0.6042, + 0.7305, 0.2510, 0.4960, 0.3143, 0.3207, 0.3323, 0.5478, + 0.3218, 0.1649, 0.9155, 0.2697, 0.4415, 0.6177, 0.1457, + 0.9256, 0.6524, 0.8106, 0.1943, 0.2636, 0.7375, 0.5837, + 0.4529, 0.4107, 0.4337, 0.4074, 0.1673, 0.9988, 0.7338, + 0.1243, 0.9778, 0.4221, 0.2348, 0.5442, 0.1259, 0.4222, + 0.6127, 0.0857, 0.6974, 0.0596, 0.3553, 0.4614, 0.5799, + 0.4404, 0.3360, 0.3314, 0.9445, 0.7231, 0.9851, 0.8853, + 0.4987, 0.3871, 0.5069, 0.6349, 0.9384, 0.3450, 0.4613, + 0.2127, 0.4994, 0.0034, 0.9538, 0.3203, 0.8248, 0.5140, + 0.0568, 0.3913, 0.0456, 0.0790, 0.1457, 0.8710, 0.7025, + 0.5191, 0.7160, 0.2410, 0.7547, 0.7169, 0.9282, 0.0473, + 0.4454, 0.5093, 0.4795, 0.3417, 0.5014, 0.0605, 0.9341, + 0.8068, 0.8325, 0.0916, 0.8219, 0.9882, 0.3617, 0.8114, + 0.3412, 0.2133, 0.4138, 0.2870, 0.1987, 0.5576, 0.8136, + 0.4067, 0.6195, 0.5018, 0.5513, 0.5252, 0.0402, 0.7889, + 0.3122, 0.6215, 0.9385, 0.7669, 0.1080, 0.2818, 0.0494, + 0.0251, 0.3317, 0.4666, 0.5981, 0.5539, 0.1688, 0.7416, + 0.8841, 0.4123, 0.1102, 0.1371, 0.7232, 0.6598, 0.7427, + 0.8150, 0.1180, 0.3866, 0.1447, 0.4442, 0.5099, 0.1417, + 0.2917, 0.8599, 0.3553, 0.2307, 0.1388, 0.1482, 0.8529, + 0.3988, 0.9926, 0.3184, 0.2404, 0.4847, 0.5288, 0.0738, + 0.0517, 0.1797, 0.1796, 0.7215, 0.5955, 0.6432, 0.0017, + 0.6486, 0.6664, 0.4487, 0.7630, 0.7774, 0.9276, 0.9518, + 0.4507, 0.3399, 0.7495, 0.4581, 0.6140, 0.0659, 0.8137, + 0.4343, 0.4836, 0.5681, 0.7947, 0.1417, 
0.8229, 0.0824, + 0.2070, 0.8783, 0.3511, 0.9580, 0.1053, 0.3375, 0.2396, + 0.0513, 0.5334, 0.3977, 0.6765, 0.1035, 0.8974, 0.8093, + 0.7238, 0.8002, 0.6243, 0.9654, 0.2803]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4617, 0.6014, 0.4133, ..., 0.3579, 0.3877, 0.5185]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.89356017112732 seconds + +[16.36, 16.36, 16.4, 16.6, 16.6, 16.96, 16.96, 16.88, 16.88, 16.48] +[16.32, 16.28, 19.04, 20.36, 23.52, 24.24, 24.96, 24.96, 22.16, 21.36, 19.68, 19.76, 19.8, 19.64] +14.221415042877197 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 293134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.89356017112732, 'TIME_S_1KI': 0.03716239048055606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 278.4485914421082, 'W': 19.57952781791354} +[16.36, 16.36, 16.4, 16.6, 16.6, 16.96, 16.96, 16.88, 16.88, 16.48, 16.6, 16.64, 16.68, 16.76, 16.68, 16.56, 16.4, 16.4, 16.52, 16.64] +299.32000000000005 +14.966000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 293134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.89356017112732, 'TIME_S_1KI': 0.03716239048055606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 278.4485914421082, 'W': 19.57952781791354, 'J_1KI': 0.9499020633638819, 'W_1KI': 0.06679377969772711, 'W_D': 4.613527817913537, 'J_D': 65.610893910408, 'W_D_1KI': 0.01573863085794735, 'J_D_1KI': 5.3690908792386244e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.0001.json deleted file mode 100644 index d8def16..0000000 --- a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 9519, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 21.79372525215149, "TIME_S_1KI": 2.2894973476364626, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 607.9340667724609, "W": 24.009013571692563, "J_1KI": 63.86532900225454, "W_1KI": 2.5222201462015508, "W_D": 5.522013571692561, "J_D": 139.82332749271384, "W_D_1KI": 0.5801043777384768, "J_D_1KI": 0.06094173523883567} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.0001.output deleted file mode 100644 index 91dd9e3..0000000 --- a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.0001.output +++ /dev/null @@ -1,62 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 30000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.2060210704803467} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta 
state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 6, ..., 89996, 89998, 90000]), - col_indices=tensor([ 2876, 4713, 6957, ..., 29701, 15647, 23288]), - values=tensor([0.6297, 0.3832, 0.4268, ..., 0.4020, 0.1713, 0.6526]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.2297, 0.3740, 0.0656, ..., 0.6156, 0.3028, 0.9303]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 2.2060210704803467 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 9519 -ss 30000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 21.79372525215149} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 89997, 89999, 90000]), - col_indices=tensor([15244, 15936, 9998, ..., 16898, 18863, 20836]), - values=tensor([0.4356, 0.9410, 0.0325, ..., 0.8568, 0.9195, 0.8628]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.3669, 0.2405, 0.0914, ..., 0.8449, 0.6451, 0.3598]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 21.79372525215149 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
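[Editor's note] The repeated pattern in these output files — a CSR tensor dump, a dense vector, and the UserWarning pointing at spmv.py:75 — comes from converting a random synthetic matrix to CSR and timing repeated matrix-vector products. spmv.py itself is not part of this hunk, so the following is only a minimal sketch consistent with the logged call matrix.to_sparse_csr().type(torch.float32); the function name and the matrix-generation strategy are illustrative, not the author's code.

import time
import torch

def run_spmv(rows: int, density: float, iterations: int) -> float:
    # Random synthetic matrix; duplicate indices are merged by coalesce(),
    # so nnz is approximately rows**2 * density rather than exact.
    nnz = int(rows * rows * density)
    indices = torch.randint(0, rows, (2, nnz))
    values = torch.rand(nnz)
    matrix = torch.sparse_coo_tensor(indices, values, (rows, rows)).coalesce()
    # Matches the conversion the logged warning points at (spmv.py:75).
    matrix = matrix.to_sparse_csr().type(torch.float32)
    x = torch.rand(rows)
    start = time.time()
    for _ in range(iterations):
        y = matrix @ x  # sparse CSR matrix-vector product
    return time.time() - start

# e.g. the probe run logged above: run_spmv(30000, 0.0001, 1000)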
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 89997, 89999, 90000]), - col_indices=tensor([15244, 15936, 9998, ..., 16898, 18863, 20836]), - values=tensor([0.4356, 0.9410, 0.0325, ..., 0.8568, 0.9195, 0.8628]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.3669, 0.2405, 0.0914, ..., 0.8449, 0.6451, 0.3598]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 21.79372525215149 seconds - -[20.48, 20.6, 20.6, 20.52, 20.52, 20.76, 20.88, 21.04, 21.0, 21.0] -[20.96, 20.88, 23.92, 25.0, 27.36, 27.36, 28.16, 28.88, 25.92, 25.64, 24.4, 24.24, 24.2, 24.16, 24.24, 24.84, 24.92, 24.92, 24.6, 24.64, 24.64, 24.68, 24.56, 24.8, 24.96] -25.32107639312744 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 9519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 90000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 21.79372525215149, 'TIME_S_1KI': 2.2894973476364626, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 607.9340667724609, 'W': 24.009013571692563} -[20.48, 20.6, 20.6, 20.52, 20.52, 20.76, 20.88, 21.04, 21.0, 21.0, 20.24, 20.28, 20.44, 20.36, 20.2, 20.24, 20.24, 20.44, 20.52, 20.48] -369.74 -18.487000000000002 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 9519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 90000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 21.79372525215149, 'TIME_S_1KI': 2.2894973476364626, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 607.9340667724609, 'W': 24.009013571692563, 'J_1KI': 63.86532900225454, 'W_1KI': 2.5222201462015508, 'W_D': 5.522013571692561, 'J_D': 139.82332749271384, 'W_D_1KI': 0.5801043777384768, 'J_D_1KI': 0.06094173523883567} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.001.json deleted file mode 100644 index d309fe1..0000000 --- a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 900000, "MATRIX_DENSITY": 0.001, "TIME_S": 20.699798345565796, "TIME_S_1KI": 20.699798345565796, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 537.3931253051758, "W": 23.043866328673488, "J_1KI": 537.3931253051758, "W_1KI": 23.043866328673488, "W_D": 4.700866328673488, "J_D": 109.62627590250972, "W_D_1KI": 4.700866328673488, "J_D_1KI": 4.700866328673488} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.001.output deleted file mode 100644 index 67d8d59..0000000 --- a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.001.output +++ /dev/null @@ -1,45 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 30000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 900000, 
"MATRIX_DENSITY": 0.001, "TIME_S": 20.699798345565796} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 33, 63, ..., 899945, 899974, - 900000]), - col_indices=tensor([ 547, 1664, 1767, ..., 28485, 29124, 29514]), - values=tensor([0.5453, 0.3696, 0.4974, ..., 0.8638, 0.8625, 0.2546]), - size=(30000, 30000), nnz=900000, layout=torch.sparse_csr) -tensor([0.4458, 0.9665, 0.0852, ..., 0.0100, 0.1262, 0.9671]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 900000 -Density: 0.001 -Time: 20.699798345565796 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 33, 63, ..., 899945, 899974, - 900000]), - col_indices=tensor([ 547, 1664, 1767, ..., 28485, 29124, 29514]), - values=tensor([0.5453, 0.3696, 0.4974, ..., 0.8638, 0.8625, 0.2546]), - size=(30000, 30000), nnz=900000, layout=torch.sparse_csr) -tensor([0.4458, 0.9665, 0.0852, ..., 0.0100, 0.1262, 0.9671]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 900000 -Density: 0.001 -Time: 20.699798345565796 seconds - -[20.6, 20.52, 20.4, 20.52, 20.64, 20.44, 20.4, 20.36, 20.16, 20.16] -[20.16, 20.08, 20.2, 21.68, 22.88, 25.96, 26.92, 27.04, 26.52, 24.64, 24.28, 23.92, 24.12, 24.12, 24.48, 24.6, 24.4, 24.32, 24.28, 24.36, 24.16, 24.4, 24.32] -23.320441007614136 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 900000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 20.699798345565796, 'TIME_S_1KI': 20.699798345565796, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 537.3931253051758, 'W': 23.043866328673488} -[20.6, 20.52, 20.4, 20.52, 20.64, 20.44, 20.4, 20.36, 20.16, 20.16, 20.44, 20.2, 20.16, 20.16, 20.4, 20.44, 20.56, 20.52, 20.28, 20.2] -366.86 -18.343 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 900000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 20.699798345565796, 'TIME_S_1KI': 20.699798345565796, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 537.3931253051758, 'W': 23.043866328673488, 'J_1KI': 537.3931253051758, 'W_1KI': 23.043866328673488, 'W_D': 4.700866328673488, 'J_D': 109.62627590250972, 'W_D_1KI': 4.700866328673488, 'J_D_1KI': 4.700866328673488} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.01.json b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.01.json deleted file mode 100644 index e69de29..0000000 diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.01.output b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.01.output deleted file mode 100644 index 054bc13..0000000 --- a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_0.01.output +++ /dev/null @@ -1 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 30000 -sd 0.01 -c 16'] diff --git a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_1e-05.json deleted file mode 100644 index 00f9500..0000000 --- a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 52473, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 21.229777336120605, "TIME_S_1KI": 0.40458478333849035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 561.8060250091554, "W": 23.114903822014995, "J_1KI": 10.706573380770212, "W_1KI": 0.4405104305455185, "W_D": 4.660903822014994, "J_D": 113.28292210769662, "W_D_1KI": 0.08882480174594543, "J_D_1KI": 0.0016927715538647577} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_1e-05.output deleted file mode 100644 index a0c66f4..0000000 --- a/pytorch/output_synthetic_16core/altra_16_csr_20_10_10_synthetic_30000_1e-05.output +++ /dev/null @@ -1,62 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 30000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.4002048969268799} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8999, 9000, 9000]), - col_indices=tensor([17165, 27151, 23572, ..., 25119, 9148, 7528]), - values=tensor([0.4884, 0.2785, 0.9649, ..., 0.5831, 0.3229, 0.8447]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.9734, 0.5614, 0.1566, ..., 0.4974, 0.8204, 0.0911]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 0.4002048969268799 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 52473 -ss 30000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 21.229777336120605} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8998, 9000, 9000]), - col_indices=tensor([25247, 22356, 16191, ..., 29211, 15014, 22819]), - values=tensor([0.9864, 0.6356, 0.7247, ..., 0.1858, 0.6120, 0.1833]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.8210, 0.2318, 0.4195, ..., 0.3881, 0.9911, 0.4380]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 21.229777336120605 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
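[Editor's note] Each output file shows the same probe-then-scale pattern: a run at 1000 iterations, then one or more reruns at a scaled-up count until the measured TIME_S reaches the target runtime (10 s for the *_10_10_10_* runs, 20 s for the *_20_10_10_* runs being deleted here). The recorded counts are consistent with rescaling to about 105% of the target, e.g. int(1000 * 20 * 1.05 / 0.4002) = 52473 for the run above. A hedged reconstruction — the function name and the measure callback are purely illustrative:

def calibrate(measure, target_s=10.0, start_iters=1000):
    # measure(iters) runs the kernel `iters` times and returns elapsed seconds.
    iters = start_iters
    elapsed = measure(iters)
    while elapsed < target_s:
        # Scale to ~105% of the target so the rerun lands just past it.
        iters = int(iters * target_s * 1.05 / elapsed)
        elapsed = measure(iters)
    return iters, elapsed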
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 8998, 9000, 9000]), - col_indices=tensor([25247, 22356, 16191, ..., 29211, 15014, 22819]), - values=tensor([0.9864, 0.6356, 0.7247, ..., 0.1858, 0.6120, 0.1833]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.8210, 0.2318, 0.4195, ..., 0.3881, 0.9911, 0.4380]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 21.229777336120605 seconds - -[20.6, 20.72, 20.72, 20.72, 20.72, 20.6, 20.28, 20.32, 20.44, 20.48] -[20.48, 20.52, 20.6, 21.72, 23.52, 25.24, 26.32, 26.68, 25.64, 24.84, 24.92, 24.56, 24.64, 24.64, 24.44, 24.12, 24.32, 24.4, 24.32, 24.44, 24.28, 24.08, 24.04, 23.96] -24.3049259185791 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 52473, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 9000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 21.229777336120605, 'TIME_S_1KI': 0.40458478333849035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 561.8060250091554, 'W': 23.114903822014995} -[20.6, 20.72, 20.72, 20.72, 20.72, 20.6, 20.28, 20.32, 20.44, 20.48, 20.76, 20.68, 20.68, 20.44, 20.48, 20.24, 20.24, 20.32, 20.36, 20.4] -369.08000000000004 -18.454 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 52473, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 9000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 21.229777336120605, 'TIME_S_1KI': 0.40458478333849035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 561.8060250091554, 'W': 23.114903822014995, 'J_1KI': 10.706573380770212, 'W_1KI': 0.4405104305455185, 'W_D': 4.660903822014994, 'J_D': 113.28292210769662, 'W_D_1KI': 0.08882480174594543, 'J_D_1KI': 0.0016927715538647577} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json index 9b3aaee..f73c7ea 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 66220, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.006447315216064, "TIME_S_1KI": 0.15110914097275843, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1856.6265121269225, "W": 145.81, "J_1KI": 28.037247238401125, "W_1KI": 2.2019027484143763, "W_D": 109.5725, "J_D": 1395.2075200605393, "W_D_1KI": 1.6546738145575355, "J_D_1KI": 0.024987523626661668} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 63031, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.264819622039795, "TIME_S_1KI": 0.16285351052719765, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1907.2002365589144, "W": 143.8, "J_1KI": 30.25813070646054, "W_1KI": 2.2814170804842067, "W_D": 106.65425000000002, "J_D": 1414.5411045202616, "W_D_1KI": 1.6920919864828419, "J_D_1KI": 0.026845393322061237} diff --git 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output index 9d496c4..b2fc454 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,54 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.20569086074829102} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.20868682861328125} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 18, ..., 999978, +tensor(crow_indices=tensor([ 0, 11, 23, ..., 999975, + 999990, 1000000]), + col_indices=tensor([ 1102, 1885, 5689, ..., 70464, 82505, 82637]), + values=tensor([0.9145, 0.6563, 0.0210, ..., 0.3467, 0.9517, 0.4307]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.3954, 0.8531, 0.4592, ..., 0.1653, 0.9288, 0.8508]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.20868682861328125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '50314', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.38151502609253} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
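[Editor's note] The unlabeled numbers at the end of every record appear to decode as: a list of idle power samples (W), a list of power samples taken during the run, the power-sampling window in seconds, a partial record ending at J and W, a longer idle list followed by its sum and mean, and finally the full record. The derived fields are consistent with the relations below (e.g. for the 30000x30000, d=0.001 record deleted above: W = 537.393 J / 23.320 s = 23.044 W, and W_D = 23.044 - 366.86/20 = 4.701 W). A sketch of the arithmetic; the function name is illustrative:

def derive(record, joules, elapsed_s, idle_samples):
    kiters = record['ITERATIONS'] / 1000
    record['J'] = joules                       # integral of sampled load power
    record['W'] = joules / elapsed_s           # mean power over the window
    record['W_D'] = record['W'] - sum(idle_samples) / len(idle_samples)
    record['J_D'] = record['W_D'] * elapsed_s  # energy above idle
    for key in ('TIME_S', 'J', 'W', 'W_D'):
        record[key + '_1KI'] = record[key] / kiters
    # Oddity: in every record here the stored J_D_1KI equals W_D_1KI / kiters
    # rather than J_D / kiters; this line mirrors that observed behaviour.
    record['J_D_1KI'] = record['W_D_1KI'] / kiters
    return record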
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 15, ..., 999976, + 999987, 1000000]), + col_indices=tensor([ 9326, 16949, 19479, ..., 70135, 76689, 93251]), + values=tensor([0.2491, 0.4486, 0.5526, ..., 0.3620, 0.8491, 0.1510]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.1294, 0.2549, 0.0676, ..., 0.6377, 0.6452, 0.0657]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 8.38151502609253 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '63031', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.264819622039795} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 17, ..., 999978, 999986, 1000000]), - col_indices=tensor([ 4321, 11912, 13631, ..., 82074, 92560, 99324]), - values=tensor([0.9071, 0.2919, 0.8193, ..., 0.7739, 0.0445, 0.1624]), + col_indices=tensor([ 1906, 11602, 20474, ..., 94634, 95193, 99629]), + values=tensor([0.7595, 0.5479, 0.3671, ..., 0.3196, 0.4186, 0.5082]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6567, 0.9688, 0.9697, ..., 0.6873, 0.4864, 0.9023]) +tensor([0.5397, 0.2720, 0.7091, ..., 0.7919, 0.2241, 0.5973]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 0.20569086074829102 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '51047', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.09404468536377} +Time: 10.264819622039795 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 17, ..., 999979, - 999988, 1000000]), - col_indices=tensor([15686, 48109, 49313, ..., 51931, 56127, 66767]), - values=tensor([0.4545, 0.6496, 0.9508, ..., 0.7270, 0.9957, 0.0621]), +tensor(crow_indices=tensor([ 0, 8, 17, ..., 999978, + 999986, 1000000]), + col_indices=tensor([ 1906, 11602, 20474, ..., 94634, 95193, 99629]), + values=tensor([0.7595, 0.5479, 0.3671, ..., 0.3196, 0.4186, 0.5082]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3660, 0.6002, 0.9317, ..., 0.1977, 0.4107, 0.4541]) +tensor([0.5397, 0.2720, 0.7091, ..., 0.7919, 0.2241, 0.5973]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,50 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 8.09404468536377 seconds +Time: 10.264819622039795 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '66220', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.006447315216064} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 18, ..., 999980, - 999990, 1000000]), - col_indices=tensor([ 4776, 21129, 24622, ..., 75160, 86654, 97411]), - values=tensor([0.8410, 0.1609, 0.8553, ..., 0.3742, 0.0938, 0.8797]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6086, 0.7634, 0.6649, ..., 0.3430, 0.9091, 0.5785]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 10.006447315216064 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
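[Editor's note] The UserWarning repeated throughout these logs is expected: PyTorch flags sparse CSR support as beta, and the conversion at spmv.py:75 triggers the notice in every process. If the noise mattered, it could be filtered with the standard library; a hedged, stdlib-only example (not something the original script does):

import warnings

# Silence only this beta-support notice, not other warnings.
warnings.filterwarnings(
    'ignore',
    message='Sparse CSR tensor support is in beta state')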
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 18, ..., 999980, - 999990, 1000000]), - col_indices=tensor([ 4776, 21129, 24622, ..., 75160, 86654, 97411]), - values=tensor([0.8410, 0.1609, 0.8553, ..., 0.3742, 0.0938, 0.8797]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6086, 0.7634, 0.6649, ..., 0.3430, 0.9091, 0.5785]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 10.006447315216064 seconds - -[41.04, 40.85, 39.33, 39.23, 39.35, 39.32, 44.72, 39.63, 39.87, 39.35] -[145.81] -12.733190536499023 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 66220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.006447315216064, 'TIME_S_1KI': 0.15110914097275843, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1856.6265121269225, 'W': 145.81} -[41.04, 40.85, 39.33, 39.23, 39.35, 39.32, 44.72, 39.63, 39.87, 39.35, 40.83, 39.21, 39.3, 44.69, 39.3, 39.36, 39.77, 39.25, 41.13, 39.66] -724.75 -36.2375 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 66220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.006447315216064, 'TIME_S_1KI': 0.15110914097275843, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1856.6265121269225, 'W': 145.81, 'J_1KI': 28.037247238401125, 'W_1KI': 2.2019027484143763, 'W_D': 109.5725, 'J_D': 1395.2075200605393, 'W_D_1KI': 1.6546738145575355, 'J_D_1KI': 0.024987523626661668} +[40.75, 39.66, 39.67, 39.19, 39.32, 45.15, 39.35, 39.19, 39.95, 39.54] +[143.8] +13.262866735458374 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 63031, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.264819622039795, 'TIME_S_1KI': 0.16285351052719765, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1907.2002365589144, 'W': 143.8} +[40.75, 39.66, 39.67, 39.19, 39.32, 45.15, 39.35, 39.19, 39.95, 39.54, 40.42, 44.44, 57.71, 39.25, 40.02, 40.75, 39.74, 39.58, 39.68, 39.82] +742.915 +37.14575 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 63031, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.264819622039795, 'TIME_S_1KI': 0.16285351052719765, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1907.2002365589144, 'W': 143.8, 'J_1KI': 30.25813070646054, 'W_1KI': 2.2814170804842067, 'W_D': 106.65425000000002, 'J_D': 1414.5411045202616, 'W_D_1KI': 1.6920919864828419, 'J_D_1KI': 0.026845393322061237} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..47e1eaa --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4290, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 
10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.725477457046509, "TIME_S_1KI": 2.500111295348837, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2014.465433692932, "W": 126.69, "J_1KI": 469.57236216618463, "W_1KI": 29.53146853146853, "W_D": 91.17699999999999, "J_D": 1449.7822625923156, "W_D_1KI": 21.25337995337995, "J_D_1KI": 4.954167821300688} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..2b06cc0 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.4475483894348145} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 108, 211, ..., 9999795, + 9999912, 10000000]), + col_indices=tensor([ 147, 1138, 2699, ..., 95915, 96101, 99505]), + values=tensor([0.5370, 0.7637, 0.8320, ..., 0.1671, 0.6910, 0.1145]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.0022, 0.6683, 0.3307, ..., 0.4747, 0.3475, 0.4636]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 2.4475483894348145 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4290', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.725477457046509} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 113, 209, ..., 9999816, + 9999914, 10000000]), + col_indices=tensor([ 524, 3053, 3097, ..., 98248, 99944, 99996]), + values=tensor([0.2951, 0.6504, 0.4617, ..., 0.6241, 0.9747, 0.0943]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1529, 0.0141, 0.4287, ..., 0.1937, 0.2308, 0.9820]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.725477457046509 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 113, 209, ..., 9999816, + 9999914, 10000000]), + col_indices=tensor([ 524, 3053, 3097, ..., 98248, 99944, 99996]), + values=tensor([0.2951, 0.6504, 0.4617, ..., 0.6241, 0.9747, 0.0943]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1529, 0.0141, 0.4287, ..., 0.1937, 0.2308, 0.9820]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.725477457046509 seconds + +[39.92, 39.19, 39.59, 39.28, 39.81, 39.63, 39.63, 39.45, 39.41, 39.18] +[126.69] +15.900745391845703 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4290, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.725477457046509, 'TIME_S_1KI': 2.500111295348837, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2014.465433692932, 'W': 126.69} +[39.92, 39.19, 39.59, 39.28, 39.81, 39.63, 39.63, 39.45, 39.41, 39.18, 40.85, 39.15, 39.3, 39.24, 39.74, 39.48, 39.55, 39.09, 39.1, 39.29] +710.26 +35.513 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4290, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.725477457046509, 'TIME_S_1KI': 2.500111295348837, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2014.465433692932, 'W': 126.69, 'J_1KI': 469.57236216618463, 'W_1KI': 29.53146853146853, 'W_D': 91.17699999999999, 'J_D': 1449.7822625923156, 'W_D_1KI': 21.25337995337995, 'J_D_1KI': 4.954167821300688} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json index b72a71c..7f413fc 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 101854, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 13.331042528152466, "TIME_S_1KI": 0.13088383890816724, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1504.6270782256126, "W": 115.31, "J_1KI": 14.77239065943029, "W_1KI": 1.1321106682113615, "W_D": 79.84075000000001, "J_D": 1041.8051721085908, "W_D_1KI": 0.7838744673748701, "J_D_1KI": 0.007696059726420858} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102924, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.60866904258728, "TIME_S_1KI": 0.103072840567674, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1542.0244372987747, "W": 115.47, "J_1KI": 14.982165843717448, "W_1KI": 1.121895767750962, "W_D": 79.97325000000001, "J_D": 1067.989138565898, "W_D_1KI": 0.7770126501107615, "J_D_1KI": 0.00754938255519375} diff --git 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output index 3cc7b50..fb29049 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1346125602722168} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.12978029251098633} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 3, ..., 99997, 99999, +tensor(crow_indices=tensor([ 0, 3, 4, ..., 99999, 100000, 100000]), - col_indices=tensor([50727, 53996, 86356, ..., 6143, 63321, 22305]), - values=tensor([0.4164, 0.0014, 0.4337, ..., 0.6487, 0.2549, 0.7487]), + col_indices=tensor([21616, 77637, 85619, ..., 53732, 81470, 6094]), + values=tensor([0.4857, 0.1991, 0.9153, ..., 0.9203, 0.8308, 0.8562]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9720, 0.1729, 0.4503, ..., 0.2850, 0.8795, 0.9664]) +tensor([0.0197, 0.8164, 0.2872, ..., 0.9903, 0.3891, 0.9778]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.1346125602722168 seconds +Time: 0.12978029251098633 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '78001', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.040945768356323} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '80905', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.253613233566284} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
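[Editor's note] Every record opens with the argv list echoed by the harness. The Altra runs hand the container a single shell string after -c, while the Epyc runs shown here pass python3 and its arguments directly. A hypothetical driver-side sketch of the Altra form, grounded only in the echoed lists (the function name and return handling are not from this diff):

import subprocess

def launch_altra(iters, size, density, cores=16, sif='pytorch-altra.sif'):
    argv = ['apptainer', 'run',
            '--env', 'OMP_PROC_BIND=true',
            '--env', f'OMP_PLACES={{0:{cores}}}',
            sif, '-c',
            f'numactl --cpunodebind=0 --membind=0 '
            f'python3 spmv.py synthetic csr {iters} '
            f'-ss {size} -sd {density} -c {cores}']
    return subprocess.run(argv, capture_output=True, text=True)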
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99999, 99999, 100000]), - col_indices=tensor([16049, 52557, 57673, ..., 90883, 73385, 65676]), - values=tensor([0.2845, 0.3961, 0.0285, ..., 0.0101, 0.6896, 0.8511]), + col_indices=tensor([18950, 61338, 17160, ..., 57514, 79997, 96494]), + values=tensor([0.7220, 0.1840, 0.6067, ..., 0.9597, 0.4652, 0.5228]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5851, 0.1832, 0.4128, ..., 0.6645, 0.1519, 0.8981]) +tensor([0.0221, 0.6414, 0.1516, ..., 0.3018, 0.8902, 0.3461]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 8.040945768356323 seconds +Time: 8.253613233566284 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '101854', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 13.331042528152466} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102924', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.60866904258728} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99999, 99999, 100000]), - col_indices=tensor([ 641, 46150, 85524, ..., 87101, 55219, 61785]), - values=tensor([0.2560, 0.7953, 0.3517, ..., 0.8505, 0.5170, 0.2719]), + col_indices=tensor([ 4611, 80501, 8771, ..., 95435, 27789, 45343]), + values=tensor([0.8274, 0.0201, 0.6109, ..., 0.4116, 0.6491, 0.0785]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1951, 0.6662, 0.1969, ..., 0.4780, 0.9904, 0.5617]) +tensor([0.0461, 0.3256, 0.3375, ..., 0.6234, 0.9526, 0.7301]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 13.331042528152466 seconds +Time: 10.60866904258728 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99999, 99999, 100000]), - col_indices=tensor([ 641, 46150, 85524, ..., 87101, 55219, 61785]), - values=tensor([0.2560, 0.7953, 0.3517, ..., 0.8505, 0.5170, 0.2719]), + col_indices=tensor([ 4611, 80501, 8771, ..., 95435, 27789, 45343]), + values=tensor([0.8274, 0.0201, 0.6109, ..., 0.4116, 0.6491, 0.0785]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1951, 0.6662, 0.1969, ..., 0.4780, 0.9904, 0.5617]) +tensor([0.0461, 0.3256, 0.3375, ..., 0.6234, 0.9526, 0.7301]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 13.331042528152466 seconds +Time: 10.60866904258728 seconds -[42.16, 39.6, 39.44, 39.17, 39.28, 39.19, 39.17, 39.11, 40.48, 39.44] -[115.31] -13.048539400100708 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 101854, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 13.331042528152466, 'TIME_S_1KI': 0.13088383890816724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1504.6270782256126, 'W': 115.31} -[42.16, 39.6, 39.44, 39.17, 39.28, 39.19, 39.17, 39.11, 40.48, 39.44, 39.93, 39.1, 39.15, 39.29, 39.15, 39.23, 39.19, 39.11, 39.43, 39.06] -709.3849999999999 -35.469249999999995 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 101854, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 13.331042528152466, 'TIME_S_1KI': 0.13088383890816724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1504.6270782256126, 'W': 115.31, 'J_1KI': 14.77239065943029, 'W_1KI': 1.1321106682113615, 'W_D': 79.84075000000001, 'J_D': 1041.8051721085908, 'W_D_1KI': 0.7838744673748701, 'J_D_1KI': 0.007696059726420858} +[39.93, 39.22, 39.43, 40.14, 39.51, 39.36, 39.3, 39.3, 39.26, 39.1] +[115.47] +13.354329586029053 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102924, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.60866904258728, 'TIME_S_1KI': 0.103072840567674, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1542.0244372987747, 'W': 115.47} +[39.93, 39.22, 39.43, 40.14, 39.51, 39.36, 39.3, 39.3, 39.26, 39.1, 41.76, 39.08, 39.78, 39.45, 39.66, 39.16, 39.27, 39.06, 39.08, 38.96] +709.935 +35.49675 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102924, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.60866904258728, 'TIME_S_1KI': 0.103072840567674, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1542.0244372987747, 'W': 115.47, 'J_1KI': 14.982165843717448, 'W_1KI': 1.121895767750962, 'W_D': 79.97325000000001, 'J_D': 1067.989138565898, 'W_D_1KI': 0.7770126501107615, 'J_D_1KI': 0.00754938255519375} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json index 9b9bfd5..cb11125 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 282693, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.381328821182251, "TIME_S_1KI": 0.0367229780050523, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1282.42685277462, "W": 97.9, "J_1KI": 4.5364648320779795, "W_1KI": 0.3463120770588589, "W_D": 62.39075000000001, "J_D": 817.2785818666817, "W_D_1KI": 0.22070143229581213, "J_D_1KI": 0.00078071063767342} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 278690, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.3841392993927, "TIME_S_1KI": 0.0372605378714439, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1292.3170569992064, "W": 98.52, "J_1KI": 4.63711312569237, "W_1KI": 0.3535110696472783, "W_D": 63.16824999999999, "J_D": 828.5973095390796, "W_D_1KI": 0.2266613441458251, "J_D_1KI": 0.0008133099291177477} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output index b8e2459..26aaa7d 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05349230766296387} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05305743217468262} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 10000, 10000]), - col_indices=tensor([3626, 2250, 5764, ..., 7539, 8316, 7972]), - values=tensor([0.1411, 0.7419, 0.4018, ..., 0.4202, 0.3955, 0.4235]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 9999, 10000]), + col_indices=tensor([2207, 830, 7633, ..., 2513, 8541, 2972]), + values=tensor([0.9417, 0.1071, 0.2127, ..., 0.2034, 0.4535, 0.3737]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9736, 0.6802, 0.3390, ..., 0.1575, 0.6861, 0.0446]) +tensor([0.2095, 0.5712, 0.5435, ..., 0.2564, 0.5818, 0.1577]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.05349230766296387 seconds +Time: 0.05305743217468262 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '196289', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.290691137313843} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '197898', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.456049680709839} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
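[Editor's note] Since each .json file in this tree holds a single flat object, comparing runs across machines takes only a few lines of stdlib code. A sketch using keys present in these records; the glob matches this directory, and the choice of fields is arbitrary:

import glob
import json

rows = []
for path in glob.glob('pytorch/output_synthetic_16core/*.json'):
    with open(path) as f:
        r = json.load(f)
    rows.append((r['CPU'], r['MATRIX_ROWS'], r['MATRIX_DENSITY'],
                 r['TIME_S_1KI'], r['J_1KI']))

for cpu, n, d, t, j in sorted(rows):
    print(f'{cpu:12} n={n:<7} d={d:<7} {t:.4f} s/1k-iter {j:.2f} J/1k-iter')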
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 9998, 10000, 10000]), - col_indices=tensor([ 763, 7857, 9582, ..., 1442, 6306, 9133]), - values=tensor([0.7701, 0.8887, 0.1796, ..., 0.1701, 0.0666, 0.3737]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 10000, 10000, 10000]), + col_indices=tensor([7930, 9951, 4041, ..., 9045, 6420, 8503]), + values=tensor([0.2418, 0.2435, 0.4116, ..., 0.5201, 0.9725, 0.0713]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4503, 0.2095, 0.3791, ..., 0.5528, 0.9269, 0.0093]) +tensor([0.5895, 0.0291, 0.5304, ..., 0.4324, 0.9976, 0.6205]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 7.290691137313843 seconds +Time: 7.456049680709839 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '282693', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.381328821182251} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '278690', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.3841392993927} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 9997, 9998, 10000]), - col_indices=tensor([4956, 145, 658, ..., 4096, 6098, 6574]), - values=tensor([0.3279, 0.7076, 0.5307, ..., 0.3493, 0.0702, 0.3289]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9996, 9998, 10000]), + col_indices=tensor([9574, 4944, 2003, ..., 2641, 7523, 8416]), + values=tensor([0.4157, 0.2537, 0.8916, ..., 0.3966, 0.1591, 0.0732]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.0837, 0.8046, 0.5398, ..., 0.6704, 0.0489, 0.7610]) +tensor([0.4368, 0.0363, 0.2687, ..., 0.3029, 0.2331, 0.6830]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.381328821182251 seconds +Time: 10.3841392993927 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([    0,     1,     4,  ...,  9997,  9998, 10000]),
-       col_indices=tensor([4956,  145,  658,  ..., 4096, 6098, 6574]),
-       values=tensor([0.3279, 0.7076, 0.5307,  ..., 0.3493, 0.0702, 0.3289]),
+tensor(crow_indices=tensor([    0,     0,     0,  ...,  9996,  9998, 10000]),
+       col_indices=tensor([9574, 4944, 2003,  ..., 2641, 7523, 8416]),
+       values=tensor([0.4157, 0.2537, 0.8916,  ..., 0.3966, 0.1591, 0.0732]),
        size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.0837, 0.8046, 0.5398,  ..., 0.6704, 0.0489, 0.7610])
+tensor([0.4368, 0.0363, 0.2687,  ..., 0.3029, 0.2331, 0.6830])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -69,13 +69,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 10.381328821182251 seconds
+Time: 10.3841392993927 seconds
 
-[39.48, 38.98, 44.27, 38.82, 39.35, 38.99, 39.28, 39.0, 38.75, 39.31]
-[97.9]
-13.099354982376099
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 282693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.381328821182251, 'TIME_S_1KI': 0.0367229780050523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.42685277462, 'W': 97.9}
-[39.48, 38.98, 44.27, 38.82, 39.35, 38.99, 39.28, 39.0, 38.75, 39.31, 39.89, 39.14, 38.98, 38.75, 41.57, 38.58, 39.15, 38.62, 39.12, 38.99]
-710.185
-35.509249999999994
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 282693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.381328821182251, 'TIME_S_1KI': 0.0367229780050523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.42685277462, 'W': 97.9, 'J_1KI': 4.5364648320779795, 'W_1KI': 0.3463120770588589, 'W_D': 62.39075000000001, 'J_D': 817.2785818666817, 'W_D_1KI': 0.22070143229581213, 'J_D_1KI': 0.00078071063767342}
+[40.74, 38.88, 38.93, 39.04, 38.99, 38.97, 39.42, 39.32, 39.26, 38.71]
+[98.52]
+13.11730670928955
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 278690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.3841392993927, 'TIME_S_1KI': 0.0372605378714439, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1292.3170569992064, 'W': 98.52}
+[40.74, 38.88, 38.93, 39.04, 38.99, 38.97, 39.42, 39.32, 39.26, 38.71, 39.37, 39.1, 39.73, 39.04, 39.15, 38.73, 39.2, 38.61, 38.78, 44.95]
+707.0350000000001
+35.35175
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 278690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.3841392993927, 'TIME_S_1KI': 0.0372605378714439, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1292.3170569992064, 'W': 98.52, 'J_1KI': 4.63711312569237, 'W_1KI': 0.3535110696472783, 'W_D': 63.16824999999999, 'J_D': 828.5973095390796, 'W_D_1KI': 0.2266613441458251, 'J_D_1KI': 0.0008133099291177477}
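
The derived fields in these records follow from the raw lines logged above: the first bracketed list holds idle-power samples (W), the single-element list the average load power (W), and the bare float the length of the measurement window (s). A minimal Python sketch of the apparent relations, checked against the record above (the function and argument names here are illustrative, not from spmv.py):

def derive_metrics(record, idle_samples, load_w, window_s):
    # *_1KI fields are per 1000 iterations.
    per_1k = record['ITERATIONS'] / 1000
    # Mean idle power, e.g. 707.035 / 20 = 35.35175 W above.
    idle_w = sum(idle_samples) / len(idle_samples)
    j = load_w * window_s          # 98.52 W * 13.1173... s = 1292.3170... J
    w_d = load_w - idle_w          # 98.52 - 35.35175 = 63.16825 W (W_D)
    return {'TIME_S_1KI': record['TIME_S'] / per_1k,
            'J': j, 'W': load_w,
            'J_1KI': j / per_1k, 'W_1KI': load_w / per_1k,
            'W_D': w_d, 'J_D': w_d * window_s,
            'W_D_1KI': w_d / per_1k, 'J_D_1KI': w_d * window_s / per_1k}

For the new record above: J_1KI = 1292.3170569992064 / 278.690 = 4.63711312569237 and J_D = 63.16825 * 13.11730670928955 = 828.597..., matching the stored values.
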
diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json
index d2eaa5f..9de9eeb 100644
--- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json
+++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json
@@ -1 +1 @@
-{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 189141, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.465899229049683, "TIME_S_1KI": 0.05533384738924761, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1388.9149661660194, "W": 107.77, "J_1KI": 7.343278116146259, "W_1KI": 0.5697865613484121, "W_D": 72.38875, "J_D": 932.9295560643077, "W_D_1KI": 0.3827237352028381, "J_D_1KI": 0.002023483724855204}
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 181643, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.058825254440308, "TIME_S_1KI": 0.05537689453730839, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1334.3994569778442, "W": 108.0, "J_1KI": 7.346275149484671, "W_1KI": 0.5945728709611711, "W_D": 72.86524999999999, "J_D": 900.2902780792116, "W_D_1KI": 0.4011453785722543, "J_D_1KI": 0.002208427401949177}
diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output
index 117af51..62ff1c2 100644
--- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output
+++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06988883018493652}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07387351989746094}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 21, ..., 99977, 99988, +tensor(crow_indices=tensor([ 0, 11, 20, ..., 99983, 99988, 100000]), - col_indices=tensor([ 768, 2423, 2910, ..., 9615, 9787, 9788]), - values=tensor([0.1330, 0.2030, 0.8709, ..., 0.6786, 0.0798, 0.8357]), + col_indices=tensor([2080, 2520, 2867, ..., 8307, 8901, 9286]), + values=tensor([0.8261, 0.1055, 0.9939, ..., 0.1447, 0.1951, 0.2617]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0016, 0.6011, 0.7478, ..., 0.9565, 0.9755, 0.4110]) +tensor([0.7373, 0.8108, 0.8070, ..., 0.3032, 0.8916, 0.0356]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,20 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.06988883018493652 seconds +Time: 0.07387351989746094 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '150238', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.34029221534729} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '142134', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.216149806976318} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 21, ..., 99979, 99987, +tensor(crow_indices=tensor([ 0, 8, 16, ..., 99977, 99988, 100000]), - col_indices=tensor([ 978, 1327, 2112, ..., 8470, 8534, 8708]), - values=tensor([0.4296, 0.3021, 0.5865, ..., 0.4657, 0.4173, 0.7957]), + col_indices=tensor([ 929, 1145, 1167, ..., 7253, 9439, 9881]), + values=tensor([3.5267e-01, 8.9746e-01, 4.0379e-01, ..., + 8.5718e-04, 5.6681e-01, 4.6851e-01]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7639, 0.4914, 0.7736, ..., 0.7926, 0.8542, 0.7117]) +tensor([0.4055, 0.0658, 0.7904, ..., 0.2959, 0.0826, 0.7426]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +37,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 8.34029221534729 seconds +Time: 8.216149806976318 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '189141', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.465899229049683} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '181643', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.058825254440308} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 15, ..., 99985, 99995, +tensor(crow_indices=tensor([ 0, 10, 23, ..., 99984, 99996, 100000]), - col_indices=tensor([ 277, 3135, 4455, ..., 4161, 8684, 9934]), - values=tensor([0.4295, 0.8999, 0.7885, ..., 0.8935, 0.6648, 0.4808]), + col_indices=tensor([2026, 2065, 2399, ..., 4623, 7297, 9355]), + values=tensor([0.4157, 0.6883, 0.2119, ..., 0.3441, 0.2622, 0.5721]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1477, 0.6711, 0.3568, ..., 0.3604, 0.6617, 0.9866]) +tensor([0.4888, 0.3451, 0.6891, ..., 0.9797, 0.8702, 0.1612]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +57,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.465899229049683 seconds +Time: 10.058825254440308 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      8,     15,  ...,  99985,  99995,
+tensor(crow_indices=tensor([     0,     10,     23,  ...,  99984,  99996,
                              100000]),
-       col_indices=tensor([ 277, 3135, 4455,  ..., 4161, 8684, 9934]),
-       values=tensor([0.4295, 0.8999, 0.7885,  ..., 0.8935, 0.6648, 0.4808]),
+       col_indices=tensor([2026, 2065, 2399,  ..., 4623, 7297, 9355]),
+       values=tensor([0.4157, 0.6883, 0.2119,  ..., 0.3441, 0.2622, 0.5721]),
        size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.1477, 0.6711, 0.3568,  ..., 0.3604, 0.6617, 0.9866])
+tensor([0.4888, 0.3451, 0.6891,  ..., 0.9797, 0.8702, 0.1612])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -73,13 +74,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 10.465899229049683 seconds
+Time: 10.058825254440308 seconds
 
-[39.55, 39.91, 39.11, 39.43, 38.85, 39.56, 39.33, 39.4, 38.87, 38.91]
-[107.77]
-12.887769937515259
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 189141, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.465899229049683, 'TIME_S_1KI': 0.05533384738924761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1388.9149661660194, 'W': 107.77}
-[39.55, 39.91, 39.11, 39.43, 38.85, 39.56, 39.33, 39.4, 38.87, 38.91, 39.56, 38.88, 39.03, 38.89, 39.75, 39.07, 39.18, 39.11, 38.8, 42.89]
-707.6249999999999
-35.381249999999994
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 189141, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.465899229049683, 'TIME_S_1KI': 0.05533384738924761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1388.9149661660194, 'W': 107.77, 'J_1KI': 7.343278116146259, 'W_1KI': 0.5697865613484121, 'W_D': 72.38875, 'J_D': 932.9295560643077, 'W_D_1KI': 0.3827237352028381, 'J_D_1KI': 0.002023483724855204}
+[40.01, 39.91, 39.26, 39.32, 39.03, 38.69, 39.03, 38.68, 38.9, 38.67]
+[108.0]
+12.355550527572632
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 181643, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.058825254440308, 'TIME_S_1KI': 0.05537689453730839, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1334.3994569778442, 'W': 108.0}
+[40.01, 39.91, 39.26, 39.32, 39.03, 38.69, 39.03, 38.68, 38.9, 38.67, 40.03, 39.23, 39.15, 39.23, 38.85, 38.69, 38.72, 38.72, 38.62, 38.62]
+702.6950000000002
+35.13475000000001
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 181643, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.058825254440308, 'TIME_S_1KI': 0.05537689453730839, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1334.3994569778442, 'W': 108.0, 'J_1KI': 7.346275149484671, 'W_1KI': 0.5945728709611711, 'W_D': 72.86524999999999, 'J_D': 900.2902780792116, 'W_D_1KI': 0.4011453785722543, 'J_D_1KI': 0.002208427401949177}
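
Each .output above also shows the calibration pattern that produces the ITERATIONS values: a 1000-iteration probe run, then the count is rescaled until the timed loop reaches the 10-second baseline. The logged counts are consistent with a linear rescale plus roughly 5% headroom; a sketch under that assumption (run_spmv is a hypothetical stand-in for launching spmv.py and reading back TIME_S):

TARGET_S = 10.0

def calibrate(run_spmv, iterations=1000):
    # Grow the iteration count toward the 10 s target with ~5% overshoot,
    # e.g. 1000 -> 142134 -> 181643 in the density-0.001 log above.
    elapsed = run_spmv(iterations)
    while elapsed < TARGET_S:
        iterations = int(iterations * TARGET_S / elapsed * 1.05)
        elapsed = run_spmv(iterations)
    return iterations, elapsed

For the log above: 1000 * (10 / 0.07387...) * 1.05 = 142134 and 142134 * (10 / 8.2161...) * 1.05 = 181643, matching the commands that were run.
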
diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json
index a1716dd..37875a8 100644
--- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json
+++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json
@@ -1 +1 @@
-{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 105256, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.995163202285767, "TIME_S_1KI": 0.10446115378017184, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1749.1652185320856, "W": 133.05, "J_1KI": 16.61819961362854, "W_1KI": 1.264060956145018, "W_D": 97.78125000000001, "J_D": 1285.4983955249193, "W_D_1KI": 0.928985045983127, "J_D_1KI": 0.008825958101990642}
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 104114, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.469164609909058, "TIME_S_1KI": 0.10055482077250953, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1774.261237783432, "W": 135.86, "J_1KI": 17.041524077294426, "W_1KI": 1.304915765410992, "W_D": 100.35275000000001, "J_D": 1310.5549420725108, "W_D_1KI": 0.9638737345601938, "J_D_1KI": 0.009257868630157269}
diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output
index a290fe5..0c99b95 100644
--- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output
+++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.01', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.13490986824035645}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.14159297943115234}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 107, 208, ..., 999789, - 999899, 1000000]), - col_indices=tensor([ 114, 296, 309, ..., 9749, 9750, 9977]), - values=tensor([0.3507, 0.7412, 0.8612, ..., 0.2456, 0.4049, 0.8296]), +tensor(crow_indices=tensor([ 0, 124, 236, ..., 999773, + 999882, 1000000]), + col_indices=tensor([ 35, 69, 144, ..., 9773, 9862, 9873]), + values=tensor([0.1838, 0.7773, 0.5109, ..., 0.8192, 0.8376, 0.6812]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8457, 0.6850, 0.0016, ..., 0.7234, 0.0569, 0.9899]) +tensor([0.0358, 0.2032, 0.7087, ..., 0.4931, 0.1706, 0.1726]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.13490986824035645 seconds +Time: 0.14159297943115234 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '77829', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.763918876647949} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '74156', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.9134438037872314} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 109, 200, ..., 999783, - 999885, 1000000]), - col_indices=tensor([ 5, 70, 184, ..., 9826, 9903, 9930]), - values=tensor([0.4822, 0.0560, 0.4645, ..., 0.7540, 0.5324, 0.2081]), +tensor(crow_indices=tensor([ 0, 93, 199, ..., 999798, + 999892, 1000000]), + col_indices=tensor([ 57, 323, 325, ..., 9719, 9779, 9889]), + values=tensor([0.3339, 0.1610, 0.8675, ..., 0.7107, 0.3615, 0.1870]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2499, 0.5119, 0.0857, ..., 0.6236, 0.3822, 0.7230]) +tensor([0.9536, 0.3002, 0.1616, ..., 0.3121, 0.8413, 0.9505]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 7.763918876647949 seconds +Time: 7.9134438037872314 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '105256', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.995163202285767} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '98394', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.923112392425537} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 112, 220, ..., 999823, - 999909, 1000000]), - col_indices=tensor([ 16, 85, 154, ..., 9645, 9832, 9858]), - values=tensor([0.5111, 0.0405, 0.8270, ..., 0.3072, 0.2885, 0.2472]), +tensor(crow_indices=tensor([ 0, 97, 191, ..., 999779, + 999891, 1000000]), + col_indices=tensor([ 18, 52, 269, ..., 9883, 9995, 9999]), + values=tensor([0.5511, 0.2767, 0.8168, ..., 0.6887, 0.5827, 0.0686]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6255, 0.9183, 0.8326, ..., 0.9246, 0.2373, 0.5392]) +tensor([0.2767, 0.4380, 0.7945, ..., 0.2102, 0.5547, 0.8740]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.995163202285767 seconds +Time: 9.923112392425537 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '104114', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.469164609909058} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 112, 220, ..., 999823, - 999909, 1000000]), - col_indices=tensor([ 16, 85, 154, ..., 9645, 9832, 9858]), - values=tensor([0.5111, 0.0405, 0.8270, ..., 0.3072, 0.2885, 0.2472]), +tensor(crow_indices=tensor([ 0, 112, 221, ..., 999805, + 999915, 1000000]), + col_indices=tensor([ 402, 501, 665, ..., 9291, 9326, 9607]), + values=tensor([0.0486, 0.5637, 0.4384, ..., 0.7973, 0.3634, 0.8351]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6255, 0.9183, 0.8326, ..., 0.9246, 0.2373, 0.5392]) +tensor([0.7936, 0.9785, 0.9590, ..., 0.6005, 0.0137, 0.6516]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +76,30 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.995163202285767 seconds +Time: 10.469164609909058 seconds -[40.1, 38.91, 39.01, 39.37, 38.97, 38.99, 39.2, 38.87, 38.93, 38.8] -[133.05] -13.146675825119019 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 105256, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.995163202285767, 'TIME_S_1KI': 0.10446115378017184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1749.1652185320856, 'W': 133.05} -[40.1, 38.91, 39.01, 39.37, 38.97, 38.99, 39.2, 38.87, 38.93, 38.8, 40.08, 39.28, 39.91, 38.88, 38.99, 38.83, 39.08, 39.35, 39.95, 38.73] -705.375 -35.26875 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 105256, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.995163202285767, 'TIME_S_1KI': 0.10446115378017184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1749.1652185320856, 'W': 133.05, 'J_1KI': 16.61819961362854, 'W_1KI': 1.264060956145018, 'W_D': 97.78125000000001, 'J_D': 1285.4983955249193, 'W_D_1KI': 0.928985045983127, 'J_D_1KI': 0.008825958101990642} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 112, 221, ..., 999805, + 999915, 1000000]), + col_indices=tensor([ 402, 501, 665, ..., 9291, 9326, 9607]), + values=tensor([0.0486, 0.5637, 0.4384, ..., 0.7973, 0.3634, 0.8351]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.7936, 0.9785, 0.9590, ..., 0.6005, 0.0137, 0.6516]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.469164609909058 seconds + +[42.04, 39.8, 39.79, 39.52, 39.3, 39.73, 39.12, 39.49, 39.14, 38.98] +[135.86] +13.059482097625732 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 104114, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.469164609909058, 'TIME_S_1KI': 0.10055482077250953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1774.261237783432, 'W': 135.86} +[42.04, 39.8, 39.79, 39.52, 39.3, 39.73, 39.12, 39.49, 39.14, 38.98, 39.91, 39.55, 39.06, 39.42, 39.43, 39.34, 38.96, 39.36, 38.94, 39.46] +710.145 +35.50725 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 104114, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.469164609909058, 'TIME_S_1KI': 0.10055482077250953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1774.261237783432, 'W': 135.86, 'J_1KI': 17.041524077294426, 'W_1KI': 1.304915765410992, 'W_D': 100.35275000000001, 'J_D': 1310.5549420725108, 'W_D_1KI': 0.9638737345601938, 'J_D_1KI': 0.009257868630157269} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json index 0aa6e19..1c24924 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27486, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.233055591583252, "TIME_S_1KI": 0.3723006472961963, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2077.8737717533113, "W": 151.69, "J_1KI": 75.59753226199925, "W_1KI": 5.518809575784036, "W_D": 115.92275, "J_D": 1587.9282864692211, "W_D_1KI": 4.217519828276212, "J_D_1KI": 0.1534424735602202} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27505, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.25606393814087, "TIME_S_1KI": 0.37287998320817556, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2107.3890700149536, "W": 153.89000000000001, "J_1KI": 76.6183992006891, "W_1KI": 5.594982730412653, "W_D": 118.26825000000002, "J_D": 1619.580332573891, "W_D_1KI": 4.299881839665516, "J_D_1KI": 0.156330915821324} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output index a8a40a5..536ca18 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.45975399017333984} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.4628758430480957} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 512, 1006, ..., 4999034, - 4999489, 5000000]), - col_indices=tensor([ 23, 40, 103, ..., 9927, 9976, 9991]), - values=tensor([0.6183, 0.2980, 0.3566, ..., 0.0352, 0.5258, 0.0852]), +tensor(crow_indices=tensor([ 0, 508, 974, ..., 4999019, + 4999492, 5000000]), + col_indices=tensor([ 9, 34, 50, ..., 9951, 9957, 9978]), + values=tensor([0.7868, 0.5776, 0.2287, ..., 0.8734, 0.0439, 0.0393]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.4623, 0.5953, 0.6862, ..., 0.1082, 0.6720, 0.4260]) +tensor([0.6523, 0.3584, 0.2115, ..., 0.2592, 0.0051, 0.7390]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 0.45975399017333984 seconds +Time: 0.4628758430480957 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '22838', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 8.724292278289795} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '22684', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 8.659529685974121} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 513, 1006, ..., 4998953, - 4999498, 5000000]), - col_indices=tensor([ 69, 83, 128, ..., 9917, 9953, 9972]), - values=tensor([0.6637, 0.2623, 0.2360, ..., 0.3507, 0.8119, 0.6229]), +tensor(crow_indices=tensor([ 0, 519, 1023, ..., 4999047, + 4999545, 5000000]), + col_indices=tensor([ 4, 44, 83, ..., 9892, 9941, 9972]), + values=tensor([0.8741, 0.5769, 0.9569, ..., 0.2090, 0.9404, 0.5070]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.8552, 0.8520, 0.0158, ..., 0.2551, 0.9127, 0.4905]) +tensor([0.1912, 0.0895, 0.2612, ..., 0.6252, 0.2980, 0.9838]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 8.724292278289795 seconds +Time: 8.659529685974121 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27486', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.233055591583252} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27505', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.25606393814087} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 518, 1031, ..., 4999018, - 4999521, 5000000]), - col_indices=tensor([ 2, 29, 76, ..., 9919, 9923, 9942]), - values=tensor([0.8327, 0.8899, 0.2406, ..., 0.0134, 0.9622, 0.2874]), +tensor(crow_indices=tensor([ 0, 494, 944, ..., 4998986, + 4999507, 5000000]), + col_indices=tensor([ 48, 74, 75, ..., 9915, 9966, 9976]), + values=tensor([0.3182, 0.9601, 0.3370, ..., 0.9931, 0.0889, 0.2292]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5989, 0.5463, 0.4311, ..., 0.3886, 0.2295, 0.1764]) +tensor([0.0431, 0.2285, 0.4438, ..., 0.2766, 0.7465, 0.1407]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.233055591583252 seconds +Time: 10.25606393814087 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 518, 1031, ..., 4999018, - 4999521, 5000000]), - col_indices=tensor([ 2, 29, 76, ..., 9919, 9923, 9942]), - values=tensor([0.8327, 0.8899, 0.2406, ..., 0.0134, 0.9622, 0.2874]), +tensor(crow_indices=tensor([ 0, 494, 944, ..., 4998986, + 4999507, 5000000]), + col_indices=tensor([ 48, 74, 75, ..., 9915, 9966, 9976]), + values=tensor([0.3182, 0.9601, 0.3370, ..., 0.9931, 0.0889, 0.2292]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5989, 0.5463, 0.4311, ..., 0.3886, 0.2295, 0.1764]) +tensor([0.0431, 0.2285, 0.4438, ..., 0.2766, 0.7465, 0.1407]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.233055591583252 seconds +Time: 10.25606393814087 seconds -[45.15, 39.18, 39.48, 39.74, 39.27, 40.48, 39.56, 39.63, 39.62, 39.49] -[151.69] -13.698159217834473 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27486, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.233055591583252, 'TIME_S_1KI': 0.3723006472961963, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2077.8737717533113, 'W': 151.69} -[45.15, 39.18, 39.48, 39.74, 39.27, 40.48, 39.56, 39.63, 39.62, 39.49, 40.16, 40.86, 39.65, 39.13, 39.26, 39.59, 39.63, 39.11, 39.14, 39.23] -715.345 -35.767250000000004 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27486, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.233055591583252, 'TIME_S_1KI': 0.3723006472961963, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2077.8737717533113, 'W': 151.69, 'J_1KI': 75.59753226199925, 'W_1KI': 5.518809575784036, 'W_D': 115.92275, 'J_D': 1587.9282864692211, 'W_D_1KI': 4.217519828276212, 'J_D_1KI': 0.1534424735602202} +[40.36, 40.12, 40.4, 39.54, 39.59, 39.1, 39.19, 40.07, 39.54, 39.22] +[153.89] +13.69412612915039 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27505, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.25606393814087, 'TIME_S_1KI': 0.37287998320817556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2107.3890700149536, 'W': 153.89000000000001} +[40.36, 40.12, 40.4, 39.54, 39.59, 39.1, 39.19, 40.07, 39.54, 39.22, 40.73, 39.18, 39.33, 39.63, 39.65, 39.27, 39.19, 39.18, 39.78, 39.04] +712.435 +35.62175 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27505, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.25606393814087, 'TIME_S_1KI': 0.37287998320817556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2107.3890700149536, 'W': 153.89000000000001, 'J_1KI': 76.6183992006891, 'W_1KI': 5.594982730412653, 'W_D': 118.26825000000002, 'J_D': 1619.580332573891, 'W_D_1KI': 4.299881839665516, 'J_D_1KI': 0.156330915821324} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..3ce07d9 --- /dev/null +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4716, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.072229146957397, "TIME_S_1KI": 2.1357568165728154, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1865.214413022995, "W": 124.6, "J_1KI": 395.5077211668777, "W_1KI": 26.42069550466497, "W_D": 88.853, "J_D": 1330.0954754440784, "W_D_1KI": 18.84075487701442, "J_D_1KI": 3.9950710086968657} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..3a1030a --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.2264370918273926} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1050, 2086, ..., 9997973, + 9998977, 10000000]), + col_indices=tensor([ 7, 18, 19, ..., 9986, 9989, 9991]), + values=tensor([0.4594, 0.3854, 0.2627, ..., 0.1030, 0.9821, 0.5221]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7604, 0.6286, 0.0952, ..., 0.1700, 0.1146, 0.5013]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 2.2264370918273926 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4716', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.072229146957397} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1024, 2032, ..., 9997988, + 9999031, 10000000]), + col_indices=tensor([ 8, 19, 25, ..., 9964, 9974, 9989]), + values=tensor([0.6442, 0.9503, 0.4324, ..., 0.4734, 0.5264, 0.7582]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7346, 0.1525, 0.9122, ..., 0.8135, 0.4141, 0.4880]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.072229146957397 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1024, 2032, ..., 9997988, + 9999031, 10000000]), + col_indices=tensor([ 8, 19, 25, ..., 9964, 9974, 9989]), + values=tensor([0.6442, 0.9503, 0.4324, ..., 0.4734, 0.5264, 0.7582]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7346, 0.1525, 0.9122, ..., 0.8135, 0.4141, 0.4880]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.072229146957397 seconds + +[40.38, 39.18, 39.76, 39.52, 39.78, 39.24, 39.97, 39.14, 39.12, 39.09] +[124.6] +14.969618082046509 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4716, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.072229146957397, 'TIME_S_1KI': 2.1357568165728154, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1865.214413022995, 'W': 124.6} +[40.38, 39.18, 39.76, 39.52, 39.78, 39.24, 39.97, 39.14, 39.12, 39.09, 39.77, 39.88, 39.63, 39.31, 39.15, 39.05, 39.3, 39.01, 43.67, 41.22] +714.94 +35.747 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4716, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.072229146957397, 'TIME_S_1KI': 2.1357568165728154, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1865.214413022995, 'W': 124.6, 'J_1KI': 395.5077211668777, 'W_1KI': 26.42069550466497, 'W_D': 88.853, 'J_D': 1330.0954754440784, 'W_D_1KI': 18.84075487701442, 'J_D_1KI': 3.9950710086968657} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json index cda3d72..6beae0d 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 375977, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.999524116516113, "TIME_S_1KI": 0.029255843087518954, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1323.755545105934, "W": 96.41999999999999, "J_1KI": 3.5208418203930933, "W_1KI": 0.25645185742744897, "W_D": 61.132499999999986, "J_D": 839.2914941006896, 
"W_D_1KI": 0.16259638222550846, "J_D_1KI": 0.0004324636406628822} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 355068, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.454679489135742, "TIME_S_1KI": 0.029444161369472165, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1283.4049695920944, "W": 97.06, "J_1KI": 3.614532905224054, "W_1KI": 0.27335608953777873, "W_D": 61.48125, "J_D": 812.9542735084892, "W_D_1KI": 0.17315345229646154, "J_D_1KI": 0.0004876627921875853} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output index e9eebf5..f912c9b 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,266 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04503059387207031} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1263408660888672} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([4113, 614, 2519, 8527, 7416, 2734, 949, 6682, 484, - 5512, 2710, 4041, 4037, 3756, 4925, 4764, 8722, 3874, - 8352, 2481, 3774, 8294, 3589, 6629, 1741, 283, 3355, - 5134, 1636, 5257, 6914, 8377, 9479, 3405, 2807, 6603, - 730, 4849, 7221, 7178, 5773, 4547, 9572, 5072, 5733, - 8766, 8040, 7105, 6968, 6795, 4519, 4433, 7044, 2666, - 5807, 2089, 4272, 1275, 3276, 409, 2016, 5940, 4287, - 7005, 5810, 8597, 1286, 8246, 5523, 3085, 4475, 3444, - 5153, 3360, 5524, 9599, 3802, 5759, 6854, 9537, 9505, - 7933, 4849, 4073, 6294, 3565, 5654, 9049, 3619, 8438, - 2201, 1301, 373, 5050, 213, 3319, 2294, 9757, 2234, - 1810, 8112, 6888, 8132, 3918, 894, 916, 3277, 7303, - 4439, 8812, 5563, 6709, 2634, 805, 7224, 2711, 9378, - 7538, 3829, 8492, 5794, 788, 7855, 1497, 1779, 6847, - 7754, 9099, 5015, 8895, 2234, 7226, 3476, 4035, 7187, - 1217, 8293, 3002, 5806, 4814, 7027, 5895, 484, 4485, - 1879, 740, 7180, 6167, 776, 9922, 6200, 307, 9057, - 5384, 8371, 1115, 2660, 6841, 8748, 2961, 6800, 8955, - 3135, 5759, 4649, 6636, 3819, 980, 6238, 6734, 5414, - 7505, 121, 231, 1121, 9712, 5687, 7052, 816, 111, - 7191, 929, 7825, 966, 6662, 9913, 5970, 6853, 3650, - 2653, 2810, 2912, 8729, 1828, 3693, 9288, 554, 3170, - 4646, 1863, 8302, 8291, 8262, 6434, 7153, 7572, 278, - 3707, 342, 4305, 5353, 1645, 3158, 9062, 1831, 3442, - 5149, 7522, 6560, 8383, 4295, 5695, 856, 6038, 4672, - 1377, 9614, 3675, 6820, 6247, 2481, 4842, 2377, 3247, - 6377, 2541, 3714, 5057, 6938, 4228, 9505, 8711, 4039, - 7522, 438, 3550, 5566, 2760, 948, 1983, 2270, 3067, - 3282, 4364, 4801, 1680, 5226, 6645, 9002, 5222, 5459, - 2737, 7616, 6264, 7887, 223, 52, 8752, 4534, 2743, - 9660, 5066, 7136, 6475, 2976, 4605, 3242, 1471, 6147, - 891, 7320, 5035, 8599, 8089, 3160, 3600, 3366, 4723, - 332, 3965, 3678, 9459, 6018, 1966, 3522, 7175, 3624, - 2006, 5446, 2753, 5477, 6570, 1978, 5598, 2923, 6454, - 9211, 2207, 7417, 583, 5535, 3914, 4811, 6018, 9955, - 2760, 7, 9152, 4641, 1755, 9204, 8476, 4159, 6732, - 58, 6965, 1178, 3322, 2479, 8384, 5390, 3733, 5480, - 8738, 5963, 9220, 7794, 4116, 1556, 5705, 7177, 3304, - 2076, 1869, 1107, 6532, 2831, 264, 3709, 7642, 6257, - 6756, 655, 8454, 9755, 5961, 1723, 1346, 428, 3491, - 5232, 2725, 8447, 629, 6562, 3064, 3992, 6895, 8506, - 5127, 8552, 8547, 3524, 6818, 2773, 8892, 4914, 4601, - 3216, 6409, 4010, 3353, 1843, 5349, 643, 8379, 9654, - 2758, 7403, 996, 8474, 2873, 6165, 5586, 7092, 5911, - 8887, 1753, 295, 9968, 4807, 2036, 5209, 7858, 3526, - 9272, 6028, 3155, 8750, 4799, 3899, 6965, 9772, 1579, - 2739, 680, 8103, 4989, 9956, 5468, 7300, 4242, 9104, - 304, 2675, 6847, 1715, 8675, 3104, 6316, 3118, 1365, - 4880, 5482, 4494, 9331, 1914, 1008, 1058, 615, 6840, - 8374, 755, 6768, 5703, 9941, 8123, 2894, 6827, 9261, - 1016, 5865, 3837, 191, 6962, 9169, 8329, 8697, 1364, - 4230, 12, 6621, 8033, 606, 5058, 6909, 5930, 5310, - 6295, 626, 7718, 4644, 2636, 7562, 3504, 6140, 8756, - 1941, 7761, 1448, 1556, 2243, 5037, 7498, 1075, 4282, - 9354, 546, 1061, 8555, 3809, 8048, 7039, 3042, 5633, - 8033, 9170, 5913, 9232, 189, 9152, 3734, 3120, 8218, - 2534, 2830, 7684, 1386, 3969, 8993, 270, 2230, 9222, - 6405, 2868, 8681, 7600, 6500, 9840, 129, 2788, 7646, - 6078, 5454, 227, 3233, 2061, 5164, 3557, 6028, 3733, - 533, 4737, 6629, 2120, 69, 2205, 5593, 6558, 2333, - 3390, 1311, 9344, 3055, 4149, 912, 4398, 8156, 1605, - 9401, 3684, 6447, 9959, 9297, 3115, 8842, 7985, 959, - 5159, 2972, 
-                          ...,  791, 5332, 9031,
-                          6571]),
-       values=tensor([9.6079e-01, 5.9008e-01, 4.6121e-01,  ...,
-                      9.8752e-01, 9.0553e-01]),
+       col_indices=tensor([6511, 7342, 6569,  ...,  276, 1484, 5891]),
+       values=tensor([0.0306, 0.2996, 0.3526,  ..., 0.1449, 0.1013]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.9668, 0.9511, 0.4479,  ..., 0.0607, 0.6261, 0.2898])
+tensor([0.7754, 0.7775, 0.1646,  ..., 0.9136, 0.8074, 0.4658])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -375,378 +268,378 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 0.04503059387207031 seconds
+Time: 0.1263408660888672 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '233174', '-ss', '10000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.088708162307739}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '83108', '-ss', '10000', '-sd', '1e-05', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.457648515701294}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([6339, 4641, 8767, 9629, 5248, 7593, 5215, 4233, 3907, - 8989, 5838, 7934, 891, 5239, 5411, 4963, 7963, 8173, - 4172, 4464, 4601, 9580, 2260, 6961, 5855, 1265, 1349, - 2190, 6418, 252, 5026, 6780, 5825, 9726, 7411, 4666, - 5731, 7839, 3753, 9206, 4521, 3044, 7848, 4653, 8995, - 579, 4725, 4836, 8826, 2364, 9710, 713, 7544, 9065, - 7816, 8496, 3385, 1467, 5199, 2666, 5229, 7632, 1859, - 2358, 9351, 6205, 2016, 380, 3677, 1279, 8529, 4708, - 600, 8708, 262, 2780, 7779, 4205, 2568, 2645, 4928, - 4767, 5127, 4130, 3518, 487, 2778, 3925, 1657, 1278, - 7068, 3351, 3630, 7719, 3614, 1109, 142, 4337, 7018, - 7816, 7494, 2297, 4786, 6789, 6911, 449, 6163, 812, - 8883, 3887, 726, 6261, 3381, 1211, 7361, 2658, 4836, - 2934, 3551, 5047, 7903, 3714, 2712, 9272, 1649, 9481, - 7845, 4115, 8011, 4904, 3857, 2157, 8222, 3091, 2952, - 5530, 893, 406, 656, 4883, 2559, 4176, 7319, 3087, - 1684, 1065, 2850, 6576, 7935, 6475, 7649, 2355, 6637, - 4645, 9422, 6130, 5581, 5702, 6904, 7945, 8119, 2180, - 408, 8239, 4419, 6214, 6023, 9693, 7635, 1127, 4320, - 8861, 4722, 2049, 6422, 104, 6661, 9156, 8586, 392, - 7446, 3109, 5971, 4358, 5232, 4896, 9459, 1771, 6550, - 3011, 5959, 9478, 4621, 388, 6640, 2764, 9283, 7190, - 464, 9583, 625, 4106, 2013, 1349, 4445, 446, 543, - 1545, 2963, 5230, 7111, 6095, 3395, 6571, 3059, 7732, - 2990, 7387, 3662, 925, 9527, 7839, 3262, 9035, 8871, - 7566, 6916, 7858, 9944, 1520, 9421, 9, 871, 3003, - 5962, 6872, 133, 5941, 4856, 8270, 8264, 8771, 26, - 6480, 1894, 8486, 8443, 6907, 6247, 7564, 9817, 5311, - 1169, 8939, 1744, 876, 5934, 7137, 7223, 9877, 4333, - 1508, 4152, 4899, 151, 6716, 2952, 5055, 9815, 9749, - 2132, 1495, 2274, 7975, 4840, 9431, 3088, 5808, 3007, - 5129, 656, 5845, 3672, 167, 3471, 6259, 9418, 9097, - 1751, 3334, 4912, 4687, 7215, 2905, 4983, 1876, 7556, - 6104, 8703, 1783, 3362, 306, 1557, 8547, 6285, 9713, - 5770, 8443, 3707, 6968, 8382, 3037, 4556, 6574, 3174, - 7983, 6141, 705, 7981, 1230, 4689, 8155, 4575, 6941, - 8183, 7079, 4053, 4770, 5978, 9450, 9971, 6548, 4713, - 7330, 2381, 8428, 895, 4263, 2382, 2494, 3102, 377, - 6580, 5769, 8240, 1818, 9244, 8064, 4348, 5859, 4637, - 6796, 5807, 1227, 5655, 3159, 4788, 5904, 5163, 8643, - 7455, 9087, 2599, 3470, 1933, 2052, 5737, 5929, 6787, - 7900, 3028, 1592, 2681, 3990, 5150, 5906, 9765, 3388, - 7902, 4412, 4202, 4919, 3936, 5854, 4278, 6226, 5795, - 9155, 8242, 9017, 8429, 1184, 3102, 3843, 6277, 8971, - 1912, 9612, 741, 4252, 7482, 3503, 2726, 2811, 3559, - 399, 7677, 5195, 4059, 9354, 906, 3533, 5066, 5261, - 3193, 4483, 4886, 4103, 4597, 9977, 5946, 8230, 4992, - 8472, 3117, 1248, 1710, 9801, 7084, 8399, 7976, 5881, - 2831, 8380, 740, 5933, 5902, 2054, 7730, 9220, 2421, - 3042, 8571, 558, 5545, 9248, 6314, 831, 8532, 164, - 6327, 8191, 6708, 3472, 8296, 987, 1978, 7731, 184, - 8077, 7024, 4373, 6476, 709, 2001, 3591, 7951, 674, - 3179, 4761, 6258, 404, 102, 7565, 1320, 4048, 8520, - 488, 1749, 3313, 4083, 5307, 2810, 390, 2437, 5305, - 7386, 1557, 9970, 2342, 3191, 9986, 178, 4626, 9267, - 4860, 4858, 1720, 8846, 6265, 9655, 9638, 9181, 6900, - 1613, 6595, 142, 2317, 8578, 5353, 2948, 3226, 9646, - 2814, 9928, 1619, 9514, 7599, 4491, 2351, 8435, 4286, - 2796, 6697, 2767, 215, 2381, 3251, 4594, 7945, 4553, - 5908, 1906, 8132, 4651, 9740, 9380, 6421, 885, 7449, - 4730, 8470, 4933, 6284, 274, 9613, 8769, 2849, 3054, - 647, 9306, 8076, 8650, 8558, 619, 7329, 8117, 5585, - 2935, 
8965, 2239, 4899, 6916, 7529, 2944, 8894, 4693, - 883, 854, 9970, 4346, 9382, 5968, 4320, 5784, 8058, - 9957, 7797, 6604, 7854, 3200, 6586, 5581, 688, 7140, - 8461, 5640, 7421, 2, 3035, 1530, 3928, 6756, 9405, - 1862, 7698, 730, 1312, 4426, 4655, 8042, 4112, 8077, - 9757, 9198, 8803, 73, 3273, 649, 4257, 714, 5026, - 7230, 7355, 3577, 3243, 7112, 2281, 390, 231, 1780, - 450, 2570, 9716, 8058, 9251, 1372, 6520, 6388, 9274, - 3849, 3159, 5752, 4576, 1614, 9466, 5115, 2113, 5253, - 3480, 1193, 6699, 1791, 1597, 784, 3587, 6483, 2652, - 1195, 2433, 1554, 6460, 858, 9065, 6225, 8438, 4741, - 5530, 7845, 5905, 350, 2785, 2578, 237, 9882, 9324, - 5864, 8426, 2099, 7651, 5581, 8116, 4379, 6300, 3252, - 2820, 7305, 9535, 3471, 717, 5704, 219, 7072, 515, - 2227, 6380, 975, 8463, 1410, 7715, 116, 2199, 1775, - 688, 8980, 3787, 7284, 1628, 9292, 2018, 2821, 9192, - 3868, 5882, 8330, 4272, 5658, 483, 6593, 7399, 6269, - 8456, 6417, 3495, 5945, 2918, 3750, 159, 7898, 1407, - 9367, 564, 5300, 5013, 8346, 7601, 8865, 9223, 5944, - 901, 3124, 8400, 9741, 2948, 4858, 4999, 8774, 4713, - 2278, 4373, 2465, 6418, 8336, 2776, 4920, 5521, 7535, - 9154, 6696, 6660, 7005, 3240, 2019, 4528, 5313, 8399, - 4692, 1293, 2442, 3422, 1700, 953, 4080, 5160, 1845, - 6240, 4122, 6751, 9296, 4353, 9549, 3356, 1816, 5195, - 6398, 5782, 1694, 7383, 8555, 468, 5643, 437, 6323, - 363, 6907, 1437, 8290, 6802, 9750, 8431, 9749, 3998, - 7798, 2853, 5360, 7863, 4272, 4940, 991, 103, 7350, - 8318, 5850, 445, 1960, 6235, 731, 2056, 647, 4575, - 2714, 7085, 940, 6566, 9433, 3767, 9844, 8314, 8025, - 2745, 9939, 6053, 2095, 8416, 672, 2673, 9124, 1946, - 4995, 3271, 9749, 4543, 1749, 8419, 9156, 9258, 1109, - 3152, 5640, 9524, 6105, 832, 8218, 4229, 1259, 4638, - 7427, 3307, 2538, 4411, 4276, 5847, 9892, 5432, 127, - 747, 2773, 2394, 5071, 2979, 6115, 8303, 972, 1077, - 766, 1747, 4452, 7344, 5742, 5149, 2427, 4401, 2357, - 4741, 4890, 1768, 6984, 4719, 350, 4811, 539, 2475, - 8551, 7532, 194, 524, 6642, 3188, 5951, 7068, 6404, - 6696, 5171, 3898, 4152, 641, 9676, 8690, 6198, 148, - 1499, 6722, 8913, 9480, 5036, 5003, 7746, 3508, 9026, - 6763, 8327, 796, 2437, 7286, 634, 9036, 5938, 5483, - 9959, 3102, 8844, 1491, 6873, 7042, 5255, 6216, 4772, - 3446, 5933, 1150, 3106, 9009, 753, 4173, 8223, 6077, - 6054, 2576, 2721, 7916, 4263, 4368, 5827, 4016, 5554, - 2188, 1725, 9744, 5504, 128, 2481, 9845, 4266, 4938, - 5254, 3436, 6223, 8530, 4577, 7941, 4287, 814, 5792, - 5956, 3575, 3730, 5063, 3004, 6623, 7598, 6478, 3605, - 664, 4036, 424, 505, 5369, 4716, 2475, 8670, 5421, - 6595, 5235, 7757, 9306, 257, 8679, 6475, 8505, 7441, - 9960, 3498, 7408, 8594, 2914, 4652, 8512, 5905, 7800, - 8736]), - values=tensor([5.9797e-02, 3.1586e-01, 8.9348e-01, 2.2301e-03, - 7.5774e-01, 8.1473e-01, 6.9005e-01, 8.3764e-01, - 1.9388e-01, 3.7028e-01, 6.6707e-01, 7.5631e-01, - 9.6784e-01, 7.9670e-02, 4.9775e-02, 9.3745e-01, - 4.2617e-01, 5.6650e-01, 2.1143e-01, 5.6781e-01, - 3.2773e-01, 8.0071e-01, 7.1744e-01, 4.9315e-01, - 5.1779e-01, 8.3231e-01, 5.6317e-01, 8.3789e-01, - 1.6867e-01, 7.2317e-01, 3.5205e-01, 9.2952e-01, - 1.0560e-01, 2.0319e-01, 5.6453e-03, 1.2691e-01, - 6.7515e-01, 2.8654e-02, 1.4929e-01, 7.5179e-01, - 4.0274e-01, 8.6652e-01, 5.7557e-01, 3.4649e-01, - 5.1560e-01, 4.4271e-02, 4.8277e-01, 9.8128e-01, - 8.8709e-01, 5.1241e-01, 9.3991e-01, 2.9700e-02, - 9.9598e-01, 2.4645e-01, 1.5525e-01, 4.4033e-01, - 1.9020e-01, 3.4616e-01, 9.3560e-02, 5.1798e-01, - 1.0148e-01, 9.2502e-01, 7.5875e-02, 1.2222e-01, - 1.1586e-01, 8.5650e-01, 1.0749e-01, 8.9669e-01, - 
5.2196e-01, 8.1933e-01, 4.5721e-01, 4.2455e-01, - 2.8674e-01, 1.4298e-01, 5.8864e-01, 4.0229e-01, - 1.6698e-01, 9.8795e-01, 7.4193e-01, 6.4737e-01, - 6.1647e-01, 5.1303e-01, 1.1406e-02, 1.4521e-01, - 3.3852e-01, 9.9773e-01, 6.8121e-01, 6.6374e-01, - 2.3301e-02, 5.1369e-01, 9.5374e-01, 4.0945e-01, - 6.7895e-01, 1.6201e-01, 7.1449e-01, 7.0733e-01, - 2.0865e-01, 5.7703e-01, 2.1282e-01, 9.8079e-01, - 3.0126e-01, 9.2204e-01, 3.5855e-01, 7.4006e-01, - 4.0260e-01, 2.4895e-01, 6.6630e-01, 4.3873e-01, - 6.9750e-01, 3.3585e-02, 2.6784e-01, 1.5943e-01, - 8.7140e-01, 1.7321e-01, 4.9353e-01, 4.7190e-01, - 2.4903e-01, 5.5852e-01, 3.0917e-01, 8.0618e-01, - 8.9951e-01, 1.5071e-01, 1.3120e-01, 6.7374e-01, - 8.9328e-01, 6.2104e-01, 3.0012e-02, 7.5397e-01, - 1.8498e-01, 2.9066e-01, 5.3134e-01, 8.4205e-01, - 2.4603e-01, 2.7883e-01, 7.0780e-01, 4.1218e-01, - 6.1176e-01, 7.8633e-01, 7.8208e-01, 8.0649e-01, - 7.2209e-01, 1.8549e-01, 3.9376e-01, 6.8222e-01, - 6.1499e-01, 7.8684e-02, 9.7657e-01, 9.2285e-01, - 2.2260e-01, 3.0372e-01, 7.9009e-01, 2.6687e-01, - 8.1037e-01, 6.1251e-01, 8.1966e-01, 3.9223e-02, - 4.6587e-01, 8.8132e-01, 1.8547e-01, 4.0486e-01, - 8.9928e-01, 8.8617e-01, 6.3218e-01, 6.5771e-01, - 8.5686e-02, 1.5945e-01, 6.0071e-01, 6.8548e-02, - 1.0579e-01, 2.6830e-01, 5.1166e-01, 9.3279e-01, - 1.1958e-01, 3.1685e-01, 6.8231e-01, 2.8457e-01, - 1.1497e-01, 1.5250e-01, 4.4321e-01, 6.0155e-01, - 7.1790e-01, 7.1978e-01, 6.7682e-01, 9.4540e-02, - 1.9797e-01, 9.5781e-01, 3.2506e-01, 2.7244e-01, - 8.3613e-01, 7.0382e-01, 7.3152e-01, 6.9588e-01, - 4.4820e-01, 4.3112e-01, 4.7823e-01, 4.6768e-01, - 1.8954e-01, 7.5206e-01, 8.3795e-01, 5.3464e-01, - 8.1446e-01, 4.3659e-01, 2.4537e-01, 3.1279e-01, - 7.1447e-01, 4.1738e-01, 1.4099e-01, 8.8032e-01, - 6.3535e-01, 7.2871e-01, 9.8657e-01, 4.6749e-01, - 2.8313e-01, 2.0200e-02, 7.7779e-02, 3.7336e-01, - 2.8724e-01, 9.4718e-01, 7.2532e-01, 5.1829e-01, - 5.5683e-01, 7.0625e-01, 7.3414e-01, 7.5766e-01, - 9.7259e-01, 3.1026e-01, 2.8144e-01, 1.0492e-01, - 2.9981e-01, 5.5736e-01, 5.5069e-01, 9.9987e-01, - 6.6277e-01, 7.9977e-01, 9.2426e-01, 4.0849e-01, - 6.8562e-01, 6.7766e-01, 1.4272e-01, 5.1420e-01, - 2.1391e-01, 1.4413e-01, 5.9377e-01, 2.5969e-01, - 4.4712e-01, 6.7579e-01, 9.0170e-01, 9.4087e-01, - 3.7861e-01, 5.2494e-01, 8.0595e-01, 8.6009e-01, - 2.1255e-02, 2.3412e-01, 2.6344e-01, 6.6887e-01, - 2.2484e-01, 9.2906e-01, 2.8464e-01, 7.0818e-01, - 4.3693e-01, 1.9227e-01, 9.7000e-01, 3.8882e-01, - 7.6479e-01, 8.0671e-01, 7.2264e-01, 4.7033e-01, - 3.2987e-01, 4.5877e-01, 8.9559e-01, 5.3586e-02, - 5.1775e-01, 9.6398e-01, 4.8668e-01, 2.8733e-01, - 8.4007e-02, 6.6030e-01, 4.5053e-01, 6.4219e-01, - 5.3442e-01, 8.9204e-02, 1.3479e-02, 6.8398e-01, - 4.5496e-01, 8.4411e-03, 6.9036e-01, 6.8798e-01, - 6.2433e-01, 6.9259e-01, 3.0161e-01, 3.7995e-02, - 6.3095e-01, 3.1976e-01, 2.3167e-01, 4.6570e-01, - 5.4730e-01, 8.9851e-01, 1.4343e-01, 1.9744e-02, - 2.5263e-01, 6.6548e-01, 9.5704e-02, 8.6671e-01, - 1.1189e-01, 6.8093e-02, 5.9177e-01, 6.4241e-01, - 7.4142e-01, 7.7089e-01, 9.2055e-01, 9.1185e-01, - 6.3925e-02, 9.1064e-01, 1.9802e-01, 8.8399e-01, - 7.6479e-02, 8.8093e-01, 7.9982e-01, 3.9832e-02, - 8.7614e-01, 6.9758e-01, 2.9838e-01, 2.4498e-01, - 2.5100e-04, 2.6495e-01, 8.6896e-02, 9.6968e-01, - 9.2855e-01, 3.0162e-01, 3.1537e-02, 9.2488e-01, - 6.8346e-01, 5.9580e-03, 9.0425e-01, 6.2726e-01, - 8.8373e-01, 7.3457e-02, 4.6862e-01, 8.1256e-01, - 4.4884e-01, 3.7307e-01, 7.0977e-01, 2.7326e-01, - 8.5745e-01, 1.9742e-01, 5.8825e-02, 3.3454e-01, - 1.6380e-01, 9.1259e-01, 7.6648e-01, 9.0361e-01, - 
6.7038e-01, 2.1151e-02, 3.1773e-01, 5.8561e-01, - 8.1034e-01, 2.7378e-01, 5.1434e-01, 7.2376e-01, - 2.4184e-02, 3.6510e-01, 5.5591e-01, 7.7663e-01, - 8.6330e-01, 5.8496e-01, 8.9626e-01, 6.8770e-01, - 9.9476e-01, 7.7159e-01, 3.0655e-01, 1.8860e-01, - 8.6828e-01, 1.7995e-01, 3.4978e-01, 6.9750e-01, - 4.8250e-01, 6.8868e-01, 8.4389e-01, 7.4827e-01, - 9.9878e-01, 8.8959e-01, 5.4737e-01, 3.2379e-01, - 5.7137e-01, 8.5146e-01, 4.5064e-01, 2.3267e-01, - 8.1115e-01, 9.2106e-01, 1.2482e-02, 5.1706e-01, - 9.9087e-01, 7.0910e-01, 8.1717e-01, 5.8383e-01, - 9.1973e-01, 4.5580e-01, 1.3484e-01, 3.2765e-01, - 5.7245e-01, 9.2212e-01, 9.9063e-01, 9.0249e-01, - 6.8693e-01, 6.5675e-01, 6.1077e-01, 9.2089e-01, - 6.6177e-01, 3.3438e-01, 4.2984e-01, 3.1007e-01, - 8.2846e-01, 8.8850e-01, 2.1110e-01, 8.1829e-01, - 4.7551e-01, 6.2000e-01, 4.1692e-01, 5.2092e-01, - 7.6786e-01, 9.1542e-01, 5.4392e-01, 8.8456e-01, - 8.4469e-01, 3.8102e-01, 4.3214e-01, 5.2472e-01, - 9.7229e-01, 1.4086e-01, 9.4005e-01, 9.6540e-02, - 2.5348e-01, 4.7892e-01, 2.9356e-01, 9.6241e-01, - 2.4363e-01, 6.5549e-01, 3.4664e-01, 1.5140e-01, - 2.0653e-01, 3.4251e-01, 6.5092e-02, 6.7425e-02, - 4.7641e-01, 9.7564e-01, 9.0565e-01, 2.1639e-01, - 7.5697e-01, 5.7759e-01, 8.0824e-02, 8.1807e-01, - 9.3574e-01, 9.2358e-01, 1.1971e-01, 9.0508e-01, - 4.8221e-01, 5.6793e-01, 2.4712e-01, 4.4724e-01, - 6.0687e-01, 6.0084e-01, 3.9543e-01, 6.5026e-01, - 5.4289e-01, 7.3458e-01, 7.2942e-01, 4.0512e-01, - 6.8863e-01, 4.1767e-02, 5.5293e-01, 5.0961e-02, - 5.8265e-01, 6.2885e-01, 3.2344e-01, 3.7086e-01, - 9.4513e-02, 5.2906e-01, 5.0949e-01, 4.6165e-01, - 8.7258e-01, 3.4318e-01, 9.4513e-01, 8.1701e-01, - 3.7745e-01, 7.0468e-01, 2.0683e-01, 9.8588e-01, - 9.8906e-01, 5.5614e-01, 3.2419e-01, 8.3236e-01, - 6.5764e-02, 7.5798e-01, 7.6410e-01, 4.6352e-01, - 6.7976e-01, 3.6415e-01, 3.3778e-01, 6.7228e-01, - 3.1395e-01, 6.7979e-01, 9.9630e-01, 7.7595e-01, - 4.2823e-01, 1.8224e-01, 2.5108e-01, 8.4732e-01, - 9.5807e-02, 7.4592e-01, 9.1690e-01, 5.4894e-01, - 6.4604e-01, 4.2867e-02, 8.9503e-02, 2.3008e-01, - 9.3091e-02, 6.8898e-01, 7.2641e-01, 7.2536e-01, - 5.1182e-01, 3.8685e-01, 4.0557e-01, 2.3397e-01, - 3.3095e-01, 1.8926e-01, 5.0915e-01, 9.4197e-01, - 8.3461e-01, 2.1076e-01, 2.2497e-01, 3.2464e-01, - 6.6271e-01, 7.4466e-01, 9.1499e-01, 6.5195e-01, - 4.1717e-01, 8.6507e-01, 6.2078e-01, 9.4500e-02, - 1.3955e-01, 6.5948e-01, 8.8222e-01, 3.5343e-02, - 6.7590e-01, 7.0646e-01, 2.8159e-03, 7.0543e-01, - 2.7916e-01, 5.2058e-01, 3.5094e-01, 5.5246e-01, - 9.8389e-01, 6.0747e-01, 5.1724e-02, 8.4717e-01, - 8.4455e-01, 7.4994e-01, 9.9018e-01, 5.0871e-01, - 3.4972e-02, 9.2650e-01, 2.0510e-01, 4.5592e-01, - 6.3049e-01, 9.0046e-01, 4.4398e-02, 4.5015e-01, - 1.5364e-01, 1.0539e-01, 1.5162e-01, 3.4784e-01, - 7.2666e-01, 8.2500e-01, 1.4279e-01, 7.1443e-01, - 9.1664e-02, 2.2943e-01, 5.6659e-01, 3.0540e-02, - 2.7326e-01, 8.6386e-01, 2.3049e-01, 3.1528e-01, - 8.7637e-01, 1.9402e-02, 6.0174e-02, 5.1567e-01, - 9.4692e-01, 6.7348e-01, 2.1911e-01, 7.7383e-01, - 4.9676e-01, 4.9372e-01, 2.0545e-01, 7.8261e-03, - 8.7264e-01, 1.1286e-01, 9.2203e-01, 5.8348e-01, - 7.5578e-01, 3.5804e-01, 9.9899e-01, 8.3314e-02, - 2.1027e-01, 6.1831e-01, 2.3938e-01, 8.7485e-01, - 1.0222e-01, 2.9796e-01, 3.2359e-01, 3.7060e-01, - 2.5884e-01, 7.3045e-01, 5.9759e-01, 4.6175e-01, - 6.3187e-01, 4.0925e-01, 8.1417e-01, 6.2798e-01, - 2.3297e-02, 3.5433e-01, 4.9332e-01, 7.2145e-01, - 2.4077e-01, 6.8467e-02, 6.2307e-01, 1.3042e-01, - 2.8138e-01, 7.5976e-01, 6.9432e-01, 9.1672e-01, - 4.3510e-01, 3.5067e-02, 4.6770e-01, 6.7232e-01, - 
2.5544e-01, 5.2293e-01, 8.7619e-01, 6.8424e-01, - 7.2771e-01, 9.3104e-01, 1.9051e-01, 9.5274e-01, - 6.9828e-01, 6.3012e-01, 6.4071e-01, 5.7960e-01, - 7.9925e-01, 9.4804e-02, 8.5019e-01, 3.5468e-01, - 8.9442e-01, 5.7501e-01, 9.7295e-01, 8.7263e-01, - 6.6304e-01, 5.1660e-01, 8.7113e-01, 6.8326e-01, - 2.0836e-01, 8.6064e-01, 6.5932e-01, 9.8303e-01, - 3.2765e-01, 7.4126e-01, 4.0225e-01, 5.8329e-01, - 3.4728e-01, 6.8208e-01, 8.6642e-01, 8.1105e-01, - 6.6790e-01, 3.5371e-01, 2.5556e-01, 9.3910e-01, - 4.0198e-01, 2.6802e-01, 3.5899e-01, 9.4529e-01, - 1.2365e-01, 5.1593e-01, 7.8345e-01, 4.2735e-01, - 9.3898e-01, 1.0231e-01, 7.9945e-01, 2.7850e-01, - 8.8009e-01, 8.4160e-01, 7.1094e-02, 8.0370e-01, - 6.0738e-01, 5.7333e-01, 6.5365e-01, 2.4782e-01, - 9.3323e-01, 1.3642e-01, 5.4412e-01, 3.2749e-01, - 2.7686e-01, 8.5186e-02, 1.3004e-01, 5.3547e-01, - 2.8639e-01, 8.8546e-01, 3.9756e-01, 5.6358e-01, - 7.6909e-01, 3.9548e-01, 4.0084e-01, 4.9522e-02, - 5.9447e-01, 5.6995e-01, 4.6947e-01, 8.7215e-01, - 6.1837e-01, 2.0034e-01, 3.8326e-01, 2.9975e-01, - 9.3635e-01, 8.5490e-01, 2.1056e-01, 6.8588e-01, - 5.1627e-01, 1.5068e-01, 9.1650e-01, 7.6046e-01, - 9.1249e-02, 1.7569e-01, 8.6032e-02, 9.4366e-01, - 1.8402e-01, 1.5197e-01, 5.1237e-01, 7.1106e-01, - 4.9457e-01, 5.9639e-01, 4.1015e-01, 5.9283e-01, - 3.4980e-01, 1.8764e-01, 3.0065e-01, 2.2116e-01, - 7.7094e-01, 5.3471e-01, 2.2148e-01, 8.0810e-01, - 3.4982e-02, 5.0035e-01, 8.2215e-01, 3.5867e-01, - 2.3880e-01, 2.4411e-01, 1.9415e-01, 7.9336e-01, - 4.7965e-01, 5.7440e-01, 1.1058e-01, 7.7221e-01, - 5.9623e-02, 6.8718e-01, 6.7513e-01, 4.8359e-01, - 2.0212e-01, 5.6448e-01, 5.4760e-01, 1.7598e-01, - 8.0921e-02, 1.3933e-01, 8.2620e-01, 3.7105e-01, - 3.5407e-01, 9.9007e-01, 8.6032e-01, 3.5567e-02, - 8.0722e-01, 7.2664e-01, 8.0544e-01, 8.6209e-01, - 5.1098e-01, 1.1634e-01, 6.5967e-02, 3.7816e-01, - 3.1122e-01, 5.0939e-01, 6.4653e-01, 4.7552e-01, - 9.7382e-01, 6.3824e-02, 5.1772e-01, 5.2606e-01, - 8.2232e-02, 4.4973e-01, 1.7601e-01, 8.1348e-01, - 7.3394e-01, 4.8243e-01, 7.1250e-01, 6.0531e-01, - 5.8494e-01, 2.4806e-01, 4.2528e-02, 3.0321e-01, - 1.1283e-01, 2.8491e-01, 5.7582e-01, 7.3199e-02, - 1.4029e-01, 3.2314e-01, 6.7951e-01, 3.4563e-01, - 1.8275e-01, 1.3782e-01, 7.7150e-01, 9.6866e-01, - 2.8417e-01, 3.2633e-01, 1.0866e-02, 2.1688e-01, - 5.9029e-01, 4.9274e-01, 5.4133e-01, 9.5045e-01, - 1.7733e-02, 1.1028e-01, 9.1602e-02, 9.1443e-01, - 9.3053e-01, 1.2892e-01, 5.8345e-01, 1.7120e-01, - 2.6217e-01, 9.0790e-01, 1.2331e-01, 1.0606e-02, - 8.9049e-02, 4.5886e-01, 8.2053e-01, 6.3672e-01, - 4.3700e-01, 9.4300e-01, 6.9414e-02, 3.6752e-02, - 7.0922e-01, 7.6619e-01, 1.7020e-01, 4.9363e-01, - 9.5185e-01, 1.8337e-02, 8.6529e-01, 3.7850e-01, - 6.5840e-01, 8.1267e-02, 6.8175e-01, 2.6105e-01, - 1.1958e-02, 8.5399e-01, 9.5227e-01, 9.4308e-01, - 3.4087e-01, 6.8046e-01, 4.0880e-01, 4.2278e-01, - 8.0349e-01, 8.2544e-01, 3.1626e-01, 2.1483e-01, - 5.8703e-01, 8.1015e-01, 2.6026e-01, 4.0984e-01, - 7.1906e-01, 7.5444e-02, 3.1672e-02, 6.2157e-01, - 4.7690e-01, 7.3978e-01, 4.6289e-01, 5.0697e-01, - 5.8991e-01, 7.3411e-02, 9.8459e-01, 9.2059e-01, - 7.7297e-02, 3.9565e-01, 5.9330e-02, 1.6467e-01, - 4.6085e-02, 9.8394e-01, 4.7420e-01, 1.7527e-01, - 8.6071e-01, 8.5043e-01, 5.4641e-01, 3.1303e-01, - 7.6572e-02, 3.3688e-01, 5.5810e-01, 7.8291e-01, - 7.5715e-01, 8.5556e-02, 3.8632e-01, 8.7833e-01, - 6.0417e-01, 9.5578e-01, 8.2911e-01, 9.1077e-01, - 4.0255e-02, 2.7065e-01, 4.4519e-01, 1.5818e-01, - 7.0510e-01, 4.7119e-01, 9.3945e-01, 1.6621e-01, - 8.9156e-01, 4.8768e-01, 2.1252e-01, 3.1250e-01, - 
2.4778e-01, 5.0132e-01, 6.3727e-02, 8.8073e-01, - 8.1067e-02, 2.3802e-01, 4.1777e-01, 2.9406e-01, - 9.7699e-01, 1.2461e-01, 3.4226e-01, 8.2877e-01, - 5.2795e-01, 6.3498e-01, 2.0711e-01, 1.6407e-01, - 4.3654e-01, 7.6900e-02, 2.4319e-01, 7.7992e-01, - 7.2403e-01, 7.0366e-01, 1.8225e-01, 9.3758e-01, - 1.8038e-01, 3.1075e-01, 4.3956e-02, 6.7919e-01, - 9.8304e-02, 4.9273e-01, 8.1425e-01, 3.7930e-02, - 2.7202e-01, 2.9840e-01, 8.2132e-01, 1.1324e-01, - 8.1103e-01, 4.8281e-01, 5.5185e-01, 3.3551e-01, - 7.4731e-01, 4.6340e-01, 6.1275e-01, 9.5266e-01, - 4.6123e-01, 1.0211e-01, 2.1856e-01, 3.1378e-01, - 4.5652e-01, 5.2280e-01, 7.3852e-01, 9.0511e-01, - 9.1503e-02, 8.9445e-01, 8.6502e-01, 7.1965e-01, - 8.6239e-01, 5.9947e-01, 3.0871e-01, 5.1635e-01, - 6.8166e-01, 6.5746e-02, 3.2846e-02, 3.8413e-01, - 2.7706e-01, 7.8206e-01, 2.4514e-01, 5.7823e-01, - 5.2040e-01, 6.2925e-01, 9.7887e-01, 8.4393e-01, - 9.8090e-01, 7.5437e-01, 6.8239e-01, 2.4410e-01, - 9.6134e-01, 1.6346e-01, 4.8181e-01, 1.9722e-01]), + col_indices=tensor([4728, 8270, 5747, 4190, 8816, 3485, 4267, 3845, 4253, + 2618, 2135, 6577, 5998, 3566, 6668, 6250, 1667, 8462, + 7617, 2323, 1453, 8003, 978, 8366, 6285, 648, 5463, + 899, 962, 5301, 3365, 7884, 106, 876, 6897, 4624, + 3877, 5749, 885, 9596, 3094, 5238, 2047, 6203, 1988, + 6479, 5863, 8063, 9929, 1527, 7334, 5835, 7266, 1117, + 88, 7369, 187, 9919, 4908, 7345, 2216, 1390, 4360, + 1957, 6131, 5807, 1391, 6755, 403, 7788, 1046, 7242, + 8970, 3414, 2560, 3707, 1132, 5998, 2036, 626, 2206, + 662, 803, 355, 8106, 9004, 6769, 794, 1420, 1896, + 164, 6812, 8998, 3729, 4484, 9825, 9211, 80, 6244, + 171, 4461, 8160, 1174, 8528, 3606, 3793, 2231, 4130, + 8667, 3710, 7197, 8752, 5434, 288, 6181, 7067, 2653, + 6236, 4202, 2795, 9441, 988, 6792, 6650, 3693, 8141, + 633, 4796, 5947, 1017, 4612, 8207, 7600, 8820, 4805, + 5145, 5635, 7409, 2070, 2726, 9324, 4538, 4774, 6228, + 6405, 6664, 101, 8899, 9017, 2316, 5471, 6613, 9928, + 3154, 3872, 1358, 6338, 598, 6342, 2583, 372, 2809, + 8751, 6740, 414, 5987, 811, 4894, 8831, 2958, 849, + 4717, 5526, 5955, 4799, 9595, 2203, 2336, 2864, 900, + 517, 3162, 9187, 5832, 79, 1363, 8975, 4939, 4776, + 3026, 126, 7583, 1393, 3238, 9223, 4283, 6288, 4095, + 9555, 5898, 7504, 294, 2051, 3299, 7802, 1236, 8275, + 9057, 482, 5171, 9240, 610, 9718, 1315, 6804, 5988, + 9836, 8876, 3060, 6067, 5389, 9613, 7797, 1833, 8290, + 3253, 9124, 772, 905, 2464, 1648, 1802, 9539, 1938, + 9314, 5521, 1856, 9872, 7296, 4233, 1675, 4423, 3765, + 530, 1445, 7731, 8494, 7965, 0, 4426, 2768, 1429, + 5780, 4619, 4566, 5778, 8756, 5631, 6012, 3678, 195, + 4299, 8811, 827, 4862, 5158, 779, 257, 9859, 496, + 8123, 4212, 8989, 7565, 832, 2190, 6349, 8903, 8585, + 9454, 7152, 2491, 2068, 3034, 6396, 4013, 4079, 2129, + 1057, 3425, 7658, 3901, 323, 2106, 7522, 8996, 214, + 6051, 521, 8387, 8594, 951, 7288, 1886, 798, 8073, + 5756, 962, 648, 2200, 6621, 2408, 8644, 820, 5897, + 1302, 7386, 5499, 7248, 4268, 9998, 8174, 48, 1222, + 911, 5368, 7298, 3994, 9752, 5709, 9631, 2037, 4076, + 147, 8400, 9031, 6673, 7864, 3772, 4943, 2120, 7990, + 749, 8147, 9103, 6842, 7112, 347, 6916, 2387, 1033, + 8333, 137, 9028, 8664, 609, 4033, 9225, 9988, 7227, + 6350, 2396, 5202, 959, 3458, 7981, 2845, 7704, 7620, + 2366, 3506, 5248, 1328, 5219, 1396, 8032, 5060, 9027, + 6404, 4256, 13, 7415, 492, 5018, 9275, 7320, 8740, + 1212, 8342, 203, 7700, 1872, 8033, 3202, 5665, 5618, + 5243, 1992, 1313, 9776, 4244, 4659, 8572, 8384, 1900, + 2452, 309, 8732, 9325, 6127, 3754, 5287, 2908, 9968, + 1564, 8645, 1458, 5251, 
2397, 6743, 2787, 1107, 8412, + 1912, 1868, 5064, 2697, 6044, 9958, 9120, 3637, 752, + 2395, 4893, 8307, 3191, 2132, 6579, 7573, 678, 1757, + 934, 7799, 8651, 9099, 1608, 6252, 2496, 1751, 964, + 3799, 7058, 8019, 6968, 2001, 8981, 8280, 2655, 1332, + 5288, 3116, 5327, 9741, 2200, 7246, 4826, 6568, 941, + 4296, 424, 9268, 5983, 6867, 5616, 615, 6917, 2807, + 4522, 4363, 9180, 2468, 5136, 5360, 1996, 9623, 8213, + 1353, 4959, 5038, 9143, 8657, 3806, 4254, 3978, 8856, + 9028, 9345, 7894, 9103, 8376, 2325, 4728, 3288, 5244, + 817, 1115, 8090, 8514, 7061, 1694, 9988, 3140, 4508, + 8374, 3717, 6894, 5623, 3924, 8919, 2712, 1905, 7855, + 101, 603, 6800, 6088, 9794, 902, 5124, 2720, 194, + 4409, 9393, 5764, 382, 5189, 9606, 8711, 3326, 2054, + 2346, 8955, 8304, 4253, 857, 871, 877, 7772, 6261, + 5351, 3769, 1919, 1190, 4603, 6432, 4804, 8328, 6405, + 6260, 8139, 3784, 4728, 3745, 6229, 4272, 3048, 3739, + 391, 1317, 2263, 1528, 4045, 6698, 2408, 7600, 1641, + 5468, 5010, 6595, 9797, 4417, 4678, 5642, 3473, 6802, + 7679, 6581, 1250, 3250, 7679, 9306, 9173, 8943, 1361, + 5871, 6491, 8139, 2569, 8640, 6095, 3558, 9320, 5441, + 4058, 8414, 8001, 5789, 1378, 236, 3166, 7474, 9358, + 1872, 9802, 1836, 9444, 4942, 2817, 2221, 1498, 3015, + 9081, 6724, 3663, 9949, 8181, 7882, 5444, 4451, 5151, + 2938, 8122, 7057, 760, 6565, 522, 3456, 114, 2176, + 1409, 3297, 6255, 382, 6867, 4469, 6249, 9488, 6695, + 4525, 1513, 1456, 4932, 5896, 5431, 6658, 618, 7608, + 7059, 4493, 5078, 9747, 1254, 8432, 2209, 6437, 287, + 7845, 6774, 1536, 5778, 8752, 9981, 3357, 7627, 8514, + 9749, 5745, 709, 9181, 7127, 7219, 9135, 8960, 7086, + 8500, 3343, 4421, 2198, 9137, 7200, 9677, 758, 4403, + 1663, 263, 5301, 2340, 2528, 3341, 9896, 754, 303, + 4338, 6663, 4676, 8299, 8165, 4111, 6099, 6910, 9133, + 149, 7699, 7265, 3192, 12, 4893, 4274, 3845, 5434, + 4857, 6744, 6474, 8679, 8435, 64, 1372, 8713, 2383, + 5222, 1429, 227, 9064, 3808, 7218, 4259, 6870, 4121, + 964, 3758, 6339, 5151, 5529, 1206, 7793, 3379, 5407, + 2115, 621, 3265, 5029, 6979, 6753, 3468, 1953, 973, + 3884, 4597, 2650, 5951, 3194, 6637, 9203, 775, 3106, + 6543, 7319, 8082, 4365, 4317, 9668, 5644, 4484, 9470, + 9345, 6258, 5267, 258, 8022, 2268, 7033, 9730, 1344, + 2632, 3400, 8195, 7331, 308, 940, 6777, 6684, 955, + 1662, 1609, 1001, 7894, 4010, 8682, 5171, 3529, 2249, + 6606, 7341, 7061, 2498, 7431, 3350, 4368, 187, 2790, + 6143, 1806, 2057, 948, 2664, 2863, 5457, 9485, 166, + 7599, 2042, 6286, 6956, 2830, 3383, 521, 3429, 1612, + 2530, 8834, 7882, 6081, 9589, 4022, 3337, 896, 9604, + 1379, 2096, 2994, 3646, 4656, 6269, 9000, 9623, 4417, + 2530, 1580, 3685, 9445, 4234, 6915, 2366, 1795, 5960, + 9972, 9148, 2563, 677, 6974, 8126, 4242, 3908, 709, + 812, 9816, 5895, 9741, 6441, 4207, 9611, 2366, 9999, + 3859, 28, 2022, 5040, 3596, 9298, 4397, 8833, 1782, + 2546, 4531, 8457, 4288, 9450, 751, 1368, 4486, 3134, + 820, 1634, 3781, 6687, 5351, 2267, 5285, 4353, 2826, + 842, 5552, 3176, 6735, 7289, 5790, 1641, 2185, 3872, + 6922, 1185, 539, 4195, 7745, 5818, 1148, 5206, 406, + 2739, 2292, 4567, 307, 5392, 2594, 6093, 6301, 2671, + 5525, 3581, 9253, 8828, 120, 8598, 9352, 8586, 9282, + 6935, 2049, 4984, 2928, 237, 1648, 1531, 4395, 163, + 9717, 2686, 7274, 9365, 2344, 691, 207, 9076, 9804, + 3773, 763, 3434, 6002, 9369, 3942, 9958, 6602, 6695, + 3089, 1216, 9129, 5343, 878, 277, 9132, 9713, 7572, + 6990, 6272, 8328, 9531, 6351, 6810, 943, 9876, 2301, + 1427, 3214, 7959, 636, 7815, 6985, 7948, 4, 2286, + 4963, 2772, 2512, 140, 6356, 3274, 6636, 7009, 3163, + 3441]), + 
+       values=tensor([5.7349e-02, 6.0520e-01, 7.4610e-01,  ..., 5.8085e-01,
+                      3.4811e-01, 4.7061e-01]),
+       [... interior entries elided; 1000 total ...]
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.2611, 0.6478, 0.1138,  ..., 0.3633, 0.3210, 0.9692])
+tensor([0.8610, 0.2031, 0.4276,  ..., 0.7862, 0.1485, 0.1233])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -754,378 +647,378 @@
 Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 7.088708162307739 seconds
+Time: 2.457648515701294 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '345384', '-ss', '10000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.645604133605957}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '355068', '-ss', '10000', '-sd', '1e-05', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.454679489135742}
 
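Each record in these .output files follows the same shape: the apptainer command, the JSON summary the script prints, the CSR dump of the generated matrix, the random dense vector, and the stats block. spmv.py itself is not part of this hunk, so the following is only a minimal sketch of the measurement it appears to perform, reconstructed from the echoed line 75 and the printed fields; make_synthetic_csr and bench are hypothetical names.

# Minimal sketch, assuming spmv.py builds a random CSR matrix at the given
# size/density and times repeated sparse matrix-vector products; this is
# not the actual implementation.
import time
import torch

def make_synthetic_csr(size: int, density: float) -> torch.Tensor:
    # MATRIX_NNZ = size^2 * density, e.g. 10000^2 * 1e-05 = 1000
    nnz = int(size * size * density)
    indices = torch.randint(0, size, (2, nnz))
    values = torch.rand(nnz)
    coo = torch.sparse_coo_tensor(indices, values, (size, size)).coalesce()
    # The conversion echoed at spmv.py:75 in the UserWarning above:
    return coo.to_sparse_csr().type(torch.float32)

def bench(matrix: torch.Tensor, iterations: int) -> float:
    x = torch.rand(matrix.shape[1])  # the dense vector printed after the matrix
    start = time.time()
    for _ in range(iterations):
        y = matrix @ x               # one sparse matrix-vector product
    return time.time() - start       # reported as TIME_S / "Time: ... seconds"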
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
   matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([8386, 6687, 1336,  ..., 2772, 4948, 5722]),
-       [... interior entries elided; 1000 total ...]
-       values=tensor([9.0115e-01, 8.8050e-01, 3.5987e-01,  ..., 1.2417e-01,
-                      7.0777e-01]),
-       [... interior entries elided; 1000 total ...]
+       col_indices=tensor([ 992, 6197, 9017,  ...,  252, 7117, 5147]),
+       [... interior entries elided; 1000 total ...]
+       values=tensor([4.3537e-01, 3.9981e-01, 3.9884e-01,  ..., 6.2259e-01,
+                      8.8417e-01]),
+       [... interior entries elided; 1000 total ...]
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.7495, 0.5234, 0.3924,  ..., 0.9829, 0.4941, 0.4696])
+tensor([0.1885, 0.9110, 0.8668,  ..., 0.5988, 0.5354, 0.4490])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -1133,378 +1026,375 @@
 Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 9.645604133605957 seconds
-
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '375977', '-ss', '10000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.999524116516113}
+Time: 10.454679489135742 seconds
 
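Across these hunks, the removed runs (345384 iterations at ~9.65 s, 375977 at ~11.00 s) give way to a single kept run (355068 iterations at ~10.45 s), which suggests the harness rescales the iteration count from a prior measurement until a run clears roughly ten seconds. Below is a hedged sketch of that proportional rescaling; the real batch.py logic is not shown in this hunk, and the target value and names here are assumptions.

# Hedged sketch only: scale the iteration count so the next run lands near
# an assumed 10 s target; TARGET_S and next_iterations are hypothetical.
TARGET_S = 10.0

def next_iterations(iterations: int, measured_s: float) -> int:
    # e.g. 345384 iterations measured at ~9.65 s rescale to ~358000
    return int(iterations * TARGET_S / measured_s)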
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
   matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([7315, 7011, 7905,  ..., 6261, 1341, 1279]),
-       [... interior entries elided; 1000 total ...]
-       values=tensor([6.3376e-01, 7.6730e-01, 7.2919e-02,  ..., 6.7611e-01,
-                      9.4603e-01]),
-       [... interior entries elided; 1000 total ...]
+       col_indices=tensor([ 992, 6197, 9017,  ..., 4168, 7306, 1890,
7445, 8049, 4043, 6422, + 8028, 4753, 279, 552, 1928, 7780, 3761, 9126, 5063, + 8243, 8819, 9608, 8907, 7215, 3984, 3222, 232, 6482, + 5518, 1304, 2970, 6697, 8420, 6280, 3556, 2455, 4432, + 7836, 2142, 8294, 4706, 2188, 3414, 4993, 2971, 749, + 8344, 4214, 7934, 8210, 701, 4237, 5917, 8563, 1985, + 2163, 1461, 3724, 4049, 4376, 5458, 6579, 1327, 2486, + 8667, 105, 2470, 425, 8314, 425, 9772, 4139, 6257, + 1691, 5611, 9904, 7172, 4768, 7051, 311, 7563, 2588, + 4556, 1076, 5750, 9387, 6607, 3505, 7299, 5162, 3677, + 6232, 3623, 8118, 4209, 6379, 2137, 7114, 2750, 727, + 1150, 9020, 6095, 8781, 964, 3401, 4498, 8869, 8433, + 4064, 657, 9799, 1100, 900, 968, 103, 252, 4771, + 8818, 5131, 9850, 7676, 9736, 7043, 6168, 1582, 4069, + 6126, 658, 3236, 5817, 4964, 9455, 1229, 1018, 2768, + 8936, 2667, 5314, 9838, 2738, 7500, 5606, 5093, 8502, + 6298, 1310, 4420, 1366, 445, 8350, 1331, 9081, 6878, + 6716, 5203, 9542, 600, 2635, 5221, 473, 3212, 9608, + 6118, 6306, 2169, 4202, 1311, 65, 4991, 1280, 9130, + 4214, 4841, 4766, 9189, 9111, 831, 6351, 3710, 7914, + 8990, 6031, 2495, 4023, 664, 4119, 8829, 3665, 7182, + 834, 5668, 3157, 6886, 7474, 7456, 7457, 5700, 5968, + 8752, 8280, 3581, 3363, 1028, 7420, 7605, 8189, 121, + 6923, 5205, 8333, 1027, 9553, 996, 9473, 6716, 3807, + 6199, 1329, 9080, 5266, 9046, 7133, 2644, 4177, 1736, + 5969, 6489, 8323, 8156, 4535, 3154, 6753, 2311, 1301, + 9560, 5364, 1311, 5827, 817, 1581, 9851, 6340, 4555, + 4149, 2665, 4030, 4378, 9486, 2986, 7488, 8663, 1098, + 8643, 9528, 5753, 3076, 1923, 6655, 4701, 2564, 2759, + 2614, 8312, 5369, 8644, 4278, 1089, 5880, 7934, 5654, + 9290, 529, 8307, 6094, 8613, 9753, 1317, 8719, 190, + 7897, 5396, 3284, 1788, 5467, 3357, 9704, 5015, 9457, + 9250, 4832, 8383, 5572, 6280, 1996, 88, 956, 9998, + 1522, 9106, 5381, 3536, 8254, 7345, 5671, 8230, 5557, + 7077, 4918, 3339, 425, 2028, 3163, 2588, 2424, 8060, + 4231, 8290, 1425, 1292, 1812, 7720, 2121, 440, 4305, + 3592, 3508, 6973, 1918, 3252, 1660, 7498, 5382, 4944, + 5506, 9328, 4854, 3012, 7765, 3368, 8118, 2049, 2553, + 9755, 5902, 6417, 1245, 6475, 5125, 6931, 355, 9409, + 765, 4975, 1840, 7347, 9096, 357, 2097, 7182, 9205, + 7495, 5566, 9152, 8183, 4955, 5578, 7022, 5514, 5444, + 6267, 8614, 5088, 3022, 6603, 9398, 7901, 9286, 3676, + 22, 1556, 106, 2739, 2806, 5294, 1354, 758, 8713, + 7422, 9061, 3417, 6963, 6041, 7734, 6184, 3868, 9643, + 5902, 6482, 1297, 4950, 9725, 9931, 7629, 8271, 9486, + 27, 9302, 9426, 8838, 7517, 2974, 1849, 2435, 6414, + 9829, 5071, 1041, 7434, 2451, 9172, 1803, 6122, 7707, + 6607, 6135, 3951, 3504, 6516, 4155, 1980, 6024, 8612, + 5327, 6044, 2155, 1648, 2058, 6766, 6976, 7736, 467, + 6836, 6292, 7090, 356, 9671, 9450, 5630, 9326, 3917, + 2056, 2251, 5784, 7410, 5124, 2541, 5740, 6770, 416, + 5387, 4977, 7567, 7754, 3358, 3577, 5938, 8258, 7308, + 6329, 4625, 8734, 2615, 2718, 5600, 116, 3518, 1951, + 5520, 8934, 148, 8573, 4159, 3406, 6214, 3760, 6900, + 1693, 2034, 3016, 9088, 8300, 9059, 2264, 3846, 3681, + 4154, 8527, 1029, 1780, 2482, 6046, 4466, 99, 4992, + 2888, 7277, 8577, 3498, 7469, 3793, 8496, 7986, 601, + 3094, 7023, 7883, 7345, 1271, 9517, 1909, 7362, 9121, + 1135, 7623, 2399, 4704, 7096, 5341, 4961, 5464, 2648, + 4405, 2528, 6055, 2121, 4870, 9037, 1532, 2739, 7180, + 3297, 9018, 4479, 7484, 2676, 2125, 5337, 1610, 4725, + 3239, 3524, 1732, 4592, 9536, 2480, 9753, 9282, 5967, + 7240, 8320, 8032, 3929, 2959, 4292, 8204, 2208, 6636, + 1874, 3136, 5027, 385, 4087, 9579, 4067, 3660, 1477, + 4241, 5719, 9858, 422, 1706, 8163, 1620, 4996, 2023, 
+ 1900, 6432, 6172, 184, 1375, 9699, 1668, 252, 7117, + 5147]), + values=tensor([4.3537e-01, 3.9981e-01, 3.9884e-01, 6.8960e-01, + 4.8973e-01, 1.4335e-01, 5.3666e-01, 3.0947e-01, + 5.1729e-01, 5.7026e-01, 7.4978e-01, 2.6557e-01, + 2.1578e-01, 9.6159e-01, 3.3602e-02, 1.9291e-01, + 3.4528e-01, 7.2294e-01, 8.8444e-01, 7.8295e-01, + 2.1309e-01, 6.4920e-01, 1.7645e-01, 1.5566e-01, + 6.3153e-01, 3.7064e-01, 6.8757e-01, 1.3889e-01, + 8.4305e-01, 2.6216e-01, 4.0551e-01, 7.0589e-01, + 9.3137e-01, 4.2583e-01, 4.6336e-01, 2.6140e-01, + 7.3049e-01, 6.7786e-01, 1.8448e-01, 1.6195e-01, + 2.9220e-01, 3.2207e-01, 1.0179e-01, 8.4428e-01, + 5.1327e-01, 2.5118e-01, 4.7234e-01, 9.2641e-01, + 9.1870e-01, 7.5642e-01, 6.0164e-01, 8.1116e-01, + 4.0225e-01, 6.1931e-01, 9.5682e-01, 7.2107e-01, + 3.5447e-01, 8.2000e-01, 6.7449e-01, 7.6617e-01, + 6.7468e-01, 3.9826e-02, 1.8946e-01, 8.3726e-02, + 8.4893e-01, 5.4004e-01, 5.3572e-03, 1.5513e-01, + 3.2889e-01, 6.7577e-01, 4.1663e-01, 1.3433e-01, + 2.1308e-01, 4.0621e-01, 1.7773e-01, 4.6669e-01, + 7.0394e-01, 9.5283e-01, 9.6965e-01, 3.5330e-01, + 7.8285e-01, 9.6208e-01, 8.1898e-01, 3.8443e-01, + 7.3563e-01, 1.5837e-01, 4.3445e-01, 2.2030e-01, + 1.1584e-01, 2.8973e-01, 4.9374e-01, 1.8947e-01, + 2.0179e-01, 4.4464e-01, 2.9576e-02, 8.4870e-01, + 9.1479e-01, 1.2898e-01, 3.7882e-01, 9.1046e-01, + 9.8325e-01, 2.1736e-01, 8.3977e-01, 2.4636e-02, + 5.9983e-01, 4.8508e-01, 1.0133e-01, 9.2427e-01, + 3.8742e-01, 9.1992e-01, 2.1236e-01, 3.7146e-02, + 4.4093e-01, 1.4010e-01, 5.5600e-01, 9.8858e-01, + 3.9218e-01, 5.6724e-01, 4.2599e-01, 5.8652e-01, + 9.5954e-01, 2.4908e-01, 6.9651e-01, 1.5425e-01, + 3.0458e-01, 9.4539e-01, 2.6284e-01, 1.7345e-02, + 7.5959e-01, 8.9099e-01, 3.1513e-01, 4.9067e-01, + 2.5794e-01, 8.9944e-01, 6.1980e-02, 9.2183e-01, + 4.6175e-01, 9.7935e-01, 1.7864e-01, 6.2861e-01, + 1.1111e-01, 1.0233e-01, 2.3174e-02, 3.6532e-01, + 7.6410e-01, 9.0695e-01, 3.9577e-01, 6.8919e-01, + 9.8700e-01, 3.1611e-01, 8.0436e-02, 1.9359e-01, + 5.1339e-01, 5.3702e-01, 7.5160e-01, 8.1859e-01, + 5.0804e-02, 3.9069e-01, 2.6045e-01, 7.6514e-01, + 2.5050e-01, 9.1500e-01, 5.6797e-01, 5.5091e-01, + 4.2969e-01, 4.9864e-01, 8.4568e-01, 8.8357e-01, + 2.4968e-01, 7.3277e-02, 2.8443e-01, 7.5808e-01, + 9.4787e-01, 7.9605e-01, 8.9312e-01, 9.9357e-01, + 9.0836e-01, 3.4784e-01, 7.5111e-01, 7.1483e-01, + 2.9800e-01, 1.6662e-01, 7.0094e-01, 3.6745e-01, + 1.2262e-01, 7.3403e-01, 9.2937e-01, 3.8392e-01, + 5.4464e-01, 5.1342e-01, 8.9046e-01, 9.8258e-01, + 5.2964e-01, 6.4014e-01, 8.1913e-01, 6.2432e-01, + 9.6651e-01, 4.7961e-01, 5.0049e-01, 7.8570e-01, + 1.7991e-01, 3.4938e-01, 2.3127e-01, 7.4863e-01, + 2.9275e-01, 6.4424e-04, 4.4849e-01, 5.8097e-01, + 7.3619e-01, 7.2211e-01, 2.5817e-01, 4.3577e-01, + 7.5637e-01, 2.9790e-01, 5.4852e-01, 8.1898e-02, + 4.6703e-01, 9.8959e-01, 5.2950e-01, 6.1777e-01, + 1.1500e-01, 7.0529e-01, 3.6459e-01, 9.7895e-01, + 3.2305e-01, 2.5527e-01, 8.7675e-01, 1.0971e-01, + 5.0946e-01, 3.9227e-01, 9.7367e-01, 6.3673e-01, + 9.5168e-02, 7.2298e-01, 1.3574e-01, 5.7008e-01, + 9.6100e-01, 5.5062e-01, 8.5493e-01, 2.6996e-01, + 7.2852e-01, 5.0724e-03, 7.0486e-01, 3.7174e-01, + 4.6905e-01, 1.6221e-01, 6.1008e-01, 1.7684e-01, + 7.1183e-01, 5.0400e-01, 2.9966e-01, 7.0806e-01, + 1.9128e-01, 1.0265e-01, 2.0013e-01, 9.0258e-01, + 4.2371e-01, 3.9347e-01, 3.4903e-01, 4.3909e-01, + 7.1615e-01, 8.3353e-02, 8.2383e-01, 1.3046e-01, + 1.6372e-02, 5.4070e-01, 8.6800e-01, 5.0743e-01, + 9.6013e-01, 3.3358e-01, 4.5913e-01, 6.2492e-01, + 1.7392e-01, 8.0187e-01, 9.5935e-01, 8.0293e-01, + 7.3448e-01, 9.6838e-01, 
4.6566e-01, 7.5415e-01, + 1.7773e-01, 4.6671e-01, 1.9277e-01, 1.9528e-01, + 3.7586e-01, 9.7197e-01, 7.1717e-01, 7.5047e-01, + 2.4187e-01, 1.9655e-01, 9.9432e-01, 9.7618e-01, + 2.8431e-01, 6.9296e-01, 1.9736e-01, 9.6645e-01, + 1.6096e-02, 3.4791e-01, 4.1191e-01, 2.0523e-01, + 6.2665e-01, 4.2102e-01, 2.3025e-02, 1.6970e-01, + 9.9316e-01, 7.6355e-01, 2.0605e-02, 1.2488e-01, + 4.0790e-01, 5.6708e-01, 8.2480e-01, 7.9370e-01, + 9.2119e-01, 8.2742e-01, 7.5665e-01, 2.5768e-01, + 4.3114e-01, 6.6583e-01, 7.7116e-01, 7.6309e-01, + 7.1228e-01, 8.7464e-01, 8.7019e-01, 9.3966e-01, + 4.2135e-01, 5.7594e-01, 6.2870e-01, 9.1797e-01, + 9.6945e-01, 7.2933e-01, 8.8178e-01, 1.8302e-01, + 2.9607e-01, 4.6905e-02, 8.6133e-01, 6.0869e-02, + 1.5112e-02, 3.4687e-01, 5.7238e-01, 2.9485e-01, + 9.3423e-01, 7.1957e-01, 1.0934e-01, 7.9192e-01, + 6.5331e-01, 4.3923e-01, 6.0516e-01, 5.7033e-01, + 6.0523e-01, 8.7734e-01, 9.9250e-01, 4.1305e-01, + 7.8645e-01, 5.1140e-01, 4.3184e-01, 4.2612e-01, + 2.3541e-01, 6.0161e-01, 2.3674e-01, 6.7388e-01, + 8.0508e-01, 6.7986e-01, 8.2585e-01, 8.4326e-01, + 5.9880e-01, 2.6417e-01, 2.6762e-01, 1.7359e-01, + 3.0395e-01, 6.5434e-02, 8.1628e-01, 9.8237e-01, + 3.0332e-01, 8.1081e-01, 9.1721e-01, 5.4623e-04, + 7.0012e-01, 5.1769e-01, 6.0702e-01, 3.4591e-01, + 2.2090e-01, 4.2492e-01, 7.1125e-01, 9.1429e-03, + 3.3527e-01, 8.6846e-01, 6.7073e-01, 7.9698e-01, + 5.3968e-01, 7.8654e-01, 4.4203e-03, 3.3520e-01, + 3.0764e-01, 7.6575e-01, 2.1197e-02, 5.3096e-01, + 3.9667e-01, 7.2879e-03, 2.9349e-01, 4.5592e-01, + 1.5381e-01, 9.7142e-01, 6.6952e-01, 3.8729e-01, + 8.1140e-01, 6.7995e-01, 7.1422e-01, 2.4486e-01, + 7.5636e-01, 2.3310e-01, 9.5289e-01, 4.8022e-01, + 8.5813e-01, 5.9732e-01, 5.3636e-01, 3.9112e-01, + 1.2996e-01, 3.2122e-01, 4.8399e-01, 2.6184e-01, + 2.1104e-01, 4.9276e-01, 3.8299e-01, 7.5953e-01, + 3.4044e-01, 3.1825e-02, 1.5820e-01, 8.7087e-01, + 4.3387e-02, 6.2383e-01, 4.5976e-01, 4.9470e-01, + 1.0368e-01, 9.6785e-01, 7.1734e-01, 2.4049e-01, + 8.7258e-02, 7.1240e-01, 8.6015e-01, 1.8576e-01, + 5.0698e-01, 6.5363e-01, 3.8025e-01, 8.7343e-01, + 5.1193e-01, 5.3928e-01, 1.8814e-01, 2.6343e-02, + 7.7821e-01, 1.7044e-01, 4.0618e-02, 4.1934e-01, + 6.3190e-01, 9.4536e-01, 6.1824e-01, 9.8729e-01, + 2.7125e-01, 2.6378e-03, 1.0222e-01, 2.1086e-01, + 6.0947e-01, 3.0901e-01, 5.5343e-01, 8.9244e-01, + 8.2691e-01, 3.0989e-01, 8.3851e-02, 8.8393e-01, + 7.4166e-01, 8.8516e-01, 7.3579e-01, 2.6421e-01, + 3.2478e-01, 9.1496e-02, 8.1097e-01, 4.7135e-01, + 6.3379e-01, 3.0775e-01, 2.7515e-01, 5.6137e-01, + 3.0909e-02, 5.1584e-01, 7.5795e-01, 2.7899e-01, + 8.0581e-01, 6.4873e-01, 7.2416e-01, 7.1340e-01, + 2.0196e-01, 3.9555e-01, 2.0446e-01, 2.4934e-01, + 1.5430e-01, 7.4440e-01, 8.0057e-01, 6.0388e-01, + 7.1343e-01, 3.5035e-01, 4.2040e-01, 8.6602e-01, + 9.4478e-02, 5.4928e-01, 4.0094e-01, 7.7335e-01, + 3.0423e-01, 1.3574e-02, 2.3620e-01, 7.1720e-01, + 9.1173e-01, 2.1903e-01, 8.9596e-01, 4.5128e-01, + 3.5343e-01, 2.1663e-01, 6.2211e-01, 4.7255e-01, + 3.7282e-01, 6.8342e-01, 9.0827e-01, 4.2800e-01, + 1.9686e-01, 3.0599e-01, 1.4444e-01, 8.8002e-01, + 4.9084e-01, 8.8924e-01, 6.3872e-01, 8.2477e-01, + 9.7932e-02, 4.9532e-01, 1.8488e-01, 2.9399e-01, + 8.4609e-01, 4.2726e-01, 7.2259e-01, 1.8113e-01, + 5.6825e-01, 6.8508e-02, 6.4229e-01, 2.9274e-01, + 2.6580e-01, 4.9086e-01, 7.3508e-01, 6.2433e-02, + 6.8630e-01, 6.1930e-01, 5.2008e-01, 2.3177e-01, + 9.0737e-01, 5.2188e-02, 8.9481e-01, 9.4065e-01, + 7.5609e-01, 5.2533e-01, 7.8363e-01, 2.1739e-01, + 7.1809e-01, 5.5801e-01, 2.6908e-01, 2.2939e-03, + 5.4688e-01, 4.5970e-01, 
3.0997e-01, 1.0388e-01, + 3.7488e-03, 1.3490e-01, 7.4331e-01, 6.6803e-01, + 2.2280e-01, 8.7632e-01, 7.6913e-01, 8.4630e-01, + 6.0233e-01, 5.3270e-01, 3.2736e-01, 9.5029e-01, + 4.2112e-02, 1.4441e-01, 5.9972e-01, 6.4757e-01, + 7.4805e-01, 6.0193e-01, 1.2325e-01, 8.1393e-01, + 6.0128e-01, 9.4638e-01, 7.9005e-02, 2.7930e-01, + 5.6736e-01, 8.5472e-01, 8.2769e-01, 7.6410e-01, + 1.2565e-02, 4.7651e-01, 9.0884e-02, 4.3338e-01, + 3.5385e-01, 2.3199e-01, 8.4134e-01, 6.4207e-01, + 9.5298e-01, 9.7793e-01, 2.2580e-01, 9.9070e-01, + 6.2713e-01, 6.2690e-01, 7.4899e-01, 1.3724e-01, + 1.4670e-01, 9.2675e-01, 5.3520e-01, 1.8449e-01, + 2.0527e-01, 2.8306e-01, 3.1045e-01, 5.8558e-01, + 3.1540e-01, 1.1982e-01, 8.8444e-01, 7.3545e-01, + 4.2022e-01, 8.1509e-03, 3.1302e-01, 8.7632e-01, + 7.9319e-01, 1.7007e-01, 1.1811e-01, 8.2777e-01, + 5.1290e-02, 3.3333e-02, 3.9343e-01, 8.2624e-01, + 8.3659e-01, 9.0654e-01, 5.0174e-01, 8.3985e-01, + 3.9016e-01, 8.3937e-01, 2.7243e-01, 5.7402e-01, + 9.3932e-01, 8.4938e-01, 7.6935e-01, 1.0589e-01, + 6.0844e-01, 1.3969e-01, 4.8714e-01, 2.3100e-01, + 1.6273e-01, 1.8331e-01, 6.7573e-01, 3.2264e-01, + 4.1490e-01, 1.0348e-01, 8.0008e-01, 2.9827e-01, + 9.1387e-01, 1.0812e-01, 4.5369e-01, 4.3080e-01, + 8.4733e-01, 1.7047e-01, 7.7975e-01, 2.8628e-01, + 5.4017e-01, 2.9737e-01, 6.4224e-01, 8.3954e-01, + 4.7604e-01, 4.3814e-02, 6.9739e-01, 4.8277e-01, + 7.6740e-01, 9.8853e-01, 6.9194e-01, 3.4640e-01, + 3.0884e-01, 9.5212e-01, 3.2579e-01, 6.1841e-01, + 4.0543e-01, 9.3466e-01, 4.8328e-02, 7.5863e-01, + 9.6793e-01, 9.2495e-01, 2.8997e-01, 2.9497e-01, + 1.9820e-02, 2.5772e-01, 2.8827e-01, 8.1536e-01, + 4.1273e-01, 9.0435e-01, 3.4102e-01, 2.8340e-01, + 5.2361e-01, 8.9342e-01, 9.9789e-01, 9.4787e-01, + 4.5467e-02, 4.3271e-01, 6.5239e-01, 5.8289e-01, + 8.7290e-01, 6.7020e-01, 6.4508e-01, 4.1572e-01, + 6.3199e-01, 1.3701e-01, 4.8627e-01, 5.3563e-01, + 9.9708e-01, 3.9874e-01, 8.7427e-01, 3.5048e-01, + 5.0099e-01, 6.8373e-02, 8.5572e-02, 9.7413e-01, + 9.1182e-01, 3.6097e-01, 1.5867e-01, 1.1895e-02, + 6.6486e-01, 7.5586e-01, 3.0038e-01, 7.0781e-01, + 7.0691e-01, 9.3527e-01, 8.7719e-01, 6.8604e-01, + 5.9175e-02, 1.8325e-01, 3.0814e-01, 2.1317e-01, + 1.4056e-01, 3.1655e-02, 2.9670e-01, 3.2601e-01, + 3.5245e-01, 1.1448e-01, 7.1016e-01, 5.8561e-01, + 7.2949e-01, 4.6438e-02, 9.4722e-01, 8.1716e-01, + 8.3412e-01, 8.6405e-01, 7.0565e-01, 2.2713e-01, + 3.2024e-01, 8.6006e-01, 9.1130e-01, 7.1546e-01, + 1.1726e-01, 2.6812e-01, 7.0313e-01, 9.0525e-01, + 6.6813e-01, 6.0951e-01, 4.2406e-01, 6.6197e-01, + 3.4051e-01, 5.0594e-01, 5.6568e-01, 6.5500e-01, + 3.9133e-02, 6.8784e-02, 1.6962e-01, 1.9637e-01, + 3.6856e-01, 8.2962e-01, 7.4094e-01, 2.4617e-01, + 9.3733e-01, 3.7437e-01, 3.0715e-01, 2.8827e-02, + 9.9026e-01, 9.8403e-01, 8.8298e-01, 6.6462e-03, + 5.4973e-01, 6.3904e-01, 6.8939e-01, 7.5675e-01, + 6.4261e-01, 1.9556e-01, 3.6342e-01, 9.3446e-02, + 5.8282e-01, 9.4687e-01, 8.8520e-01, 2.8238e-01, + 7.1927e-01, 5.7538e-01, 1.2304e-01, 4.4104e-01, + 5.8638e-01, 4.4160e-01, 7.9530e-01, 4.3712e-01, + 8.3982e-01, 3.7432e-01, 4.7695e-01, 4.5874e-01, + 8.8004e-01, 7.2230e-01, 1.1009e-01, 6.5993e-01, + 9.8603e-01, 3.2649e-01, 4.1843e-01, 9.7294e-01, + 2.0848e-01, 9.6740e-01, 4.9238e-01, 6.1925e-01, + 5.1499e-01, 5.4071e-01, 9.9466e-01, 8.4870e-01, + 2.2469e-01, 7.3287e-02, 1.1009e-01, 9.8605e-01, + 1.3893e-01, 5.6160e-01, 9.1012e-01, 1.6216e-01, + 5.4134e-01, 6.9990e-02, 4.5429e-01, 8.4017e-02, + 9.5044e-02, 8.9018e-01, 4.5031e-01, 5.0410e-01, + 9.2543e-01, 5.2699e-01, 3.1440e-01, 9.0484e-01, + 3.5531e-01, 6.7819e-03, 
4.4191e-01, 7.3541e-01, + 1.3631e-01, 6.0004e-01, 1.8558e-01, 5.0108e-01, + 1.6921e-01, 7.4587e-01, 5.7250e-01, 9.1504e-01, + 5.2848e-01, 4.3565e-01, 5.2381e-01, 3.2068e-01, + 4.2602e-01, 1.7899e-01, 1.6693e-01, 4.4754e-01, + 8.0900e-01, 7.0461e-01, 2.9947e-01, 2.6865e-01, + 7.7782e-01, 5.2593e-01, 3.3560e-01, 2.9390e-01, + 1.0731e-01, 5.9751e-01, 3.5259e-01, 8.0940e-01, + 5.4338e-02, 6.2392e-01, 9.8973e-01, 7.3598e-01, + 5.6690e-01, 9.7288e-01, 4.5821e-02, 6.3232e-01, + 6.6311e-01, 5.3438e-01, 1.0203e-01, 9.2045e-01, + 4.1039e-01, 6.9228e-01, 4.8738e-01, 8.0095e-01, + 8.7185e-01, 8.3497e-01, 6.7328e-02, 7.6656e-01, + 2.6182e-01, 4.5312e-01, 5.0198e-01, 2.1211e-02, + 3.8491e-01, 9.4217e-01, 5.0357e-01, 8.3401e-02, + 7.1052e-01, 4.2509e-01, 7.7683e-01, 3.7078e-01, + 5.1501e-01, 2.7444e-01, 9.1630e-01, 7.4677e-01, + 7.2155e-01, 4.0998e-01, 6.4794e-02, 1.7857e-01, + 3.9293e-01, 9.5205e-01, 9.1388e-01, 6.5620e-01, + 5.1912e-01, 1.6899e-01, 8.5856e-01, 4.4368e-01, + 1.5869e-01, 2.2263e-01, 7.5465e-01, 3.6280e-01, + 4.7400e-02, 9.5410e-01, 4.2697e-02, 6.7796e-01, + 9.8275e-01, 2.7874e-01, 2.6106e-01, 3.9901e-01, + 5.5379e-01, 9.3950e-02, 4.9186e-01, 1.1764e-03, + 7.2495e-01, 5.8979e-01, 6.2882e-01, 1.1772e-01, + 5.5400e-01, 3.5765e-01, 7.8510e-01, 3.2378e-01, + 8.8910e-01, 1.5997e-02, 5.5679e-02, 8.9152e-01, + 3.8640e-01, 3.8910e-01, 2.3145e-01, 3.6510e-02, + 9.5621e-01, 1.5088e-01, 4.7550e-02, 6.2587e-01, + 2.8134e-01, 4.9615e-01, 4.5673e-01, 6.4038e-01, + 9.8951e-01, 3.9053e-01, 7.4142e-02, 7.8090e-01, + 2.9839e-01, 2.9025e-01, 9.5796e-01, 1.8690e-01, + 8.4195e-01, 6.6466e-01, 1.0805e-01, 7.5828e-01, + 6.7259e-01, 5.1823e-01, 8.8034e-01, 7.2995e-02, + 5.6715e-01, 5.7986e-01, 1.6088e-01, 1.6596e-02, + 9.6255e-02, 5.8882e-01, 1.3704e-01, 1.8041e-02, + 7.3056e-01, 6.9725e-01, 3.9527e-01, 4.4598e-01, + 9.7556e-01, 3.1195e-01, 1.4884e-01, 1.9870e-01, + 1.2136e-01, 2.2091e-01, 6.2259e-01, 8.8417e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7103, 0.5485, 0.7256, ..., 0.6866, 0.8523, 0.5088]) +tensor([0.1885, 0.9110, 0.8668, ..., 0.5988, 0.5354, 0.4490]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1512,389 +1402,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.999524116516113 seconds +Time: 10.454679489135742 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7315, 7011, 7905, 1028, 2803, 8634, 5420, 3714, 9961, - 9682, 9462, 3672, 9521, 6357, 2121, 3239, 5077, 5611, - 4819, 9590, 9566, 7110, 9282, 3205, 6562, 8535, 6101, - 4471, 8275, 3563, 3283, 2844, 9366, 4926, 9577, 7356, - 8518, 1230, 10, 3109, 6967, 7024, 3566, 6230, 1306, - 5778, 1783, 7611, 4767, 1036, 2386, 1905, 3222, 7598, - 3813, 6094, 6353, 9093, 5396, 1174, 7424, 6062, 4513, - 177, 8866, 7252, 2860, 4744, 8855, 2227, 299, 9342, - 3509, 1775, 3656, 5550, 9595, 6991, 8012, 9812, 5920, - 3934, 6803, 5774, 7689, 674, 5602, 3014, 6143, 7099, - 663, 4281, 4779, 9464, 8707, 8638, 8538, 5514, 6658, - 4407, 5833, 3387, 3279, 4896, 4259, 2176, 8287, 8834, - 3999, 3877, 1161, 9724, 9738, 238, 3075, 5186, 7486, - 891, 9045, 5190, 5381, 5459, 4110, 1402, 6321, 6193, - 9155, 9992, 7314, 291, 2462, 2852, 6279, 222, 309, - 7313, 8459, 3425, 6366, 652, 7078, 5904, 156, 5860, - 2945, 3690, 8593, 4252, 7597, 5596, 5776, 7114, 5766, - 611, 9442, 2673, 7518, 6593, 859, 8021, 4190, 3626, - 9970, 1168, 1714, 3198, 7142, 1721, 1966, 1005, 3234, - 3374, 8223, 3515, 685, 4449, 3575, 1088, 4518, 6724, - 1106, 5643, 2563, 3222, 3789, 8841, 4328, 5908, 980, - 3354, 5903, 453, 6584, 6902, 1796, 5128, 9193, 4852, - 7599, 2332, 9798, 4331, 6435, 2285, 749, 227, 8974, - 2233, 1060, 9434, 8311, 8983, 7891, 4157, 9048, 9215, - 1244, 1930, 3548, 3700, 5258, 2713, 1101, 6198, 1383, - 485, 1663, 3680, 5007, 2469, 3200, 3534, 5060, 8323, - 5125, 6533, 8536, 7835, 7036, 2925, 4362, 4257, 3178, - 2891, 2108, 3452, 9536, 7002, 616, 6420, 8664, 8815, - 6583, 6609, 1072, 8384, 5169, 1059, 7775, 2477, 5080, - 5419, 1433, 8621, 761, 6118, 3052, 2864, 4101, 5536, - 5114, 2890, 8264, 7375, 8458, 5708, 8141, 9842, 7535, - 9487, 506, 9991, 4912, 6899, 6042, 5987, 8236, 2381, - 2377, 8623, 6899, 4199, 1671, 8263, 3247, 8424, 4377, - 3844, 7302, 4183, 5465, 7916, 7243, 867, 5598, 4757, - 1405, 2390, 6269, 8998, 6806, 1037, 5633, 6240, 2683, - 661, 6248, 4362, 6589, 6881, 7919, 3436, 9778, 9040, - 7754, 6133, 7587, 9613, 1799, 8034, 1412, 569, 7649, - 854, 2162, 975, 1777, 1311, 2293, 7425, 7721, 8243, - 3457, 2497, 8795, 9492, 8514, 1656, 1379, 1684, 2624, - 6150, 9236, 5502, 3220, 4424, 3861, 8284, 3334, 9155, - 9329, 2543, 88, 331, 573, 7250, 1893, 2214, 1117, - 5839, 6679, 2059, 4263, 3884, 5823, 5109, 6317, 8095, - 5772, 3805, 2801, 1284, 3513, 6344, 2050, 8362, 1104, - 3767, 8450, 2746, 7137, 2448, 1034, 910, 1808, 8546, - 7613, 4713, 1419, 7363, 5135, 416, 7410, 1254, 3607, - 5213, 2583, 1030, 7355, 9095, 3057, 2965, 1346, 6518, - 456, 1710, 4150, 606, 3991, 9343, 1922, 1471, 8654, - 8556, 4029, 7374, 5147, 245, 2185, 3615, 6782, 7298, - 4827, 1048, 4435, 5810, 5501, 9685, 936, 1353, 4747, - 6183, 8133, 2032, 6413, 2310, 638, 2549, 9087, 7037, - 9683, 9195, 7009, 3023, 8664, 3999, 7198, 4435, 2636, - 2391, 2637, 6770, 5371, 8323, 5172, 7013, 9841, 3318, - 9942, 9659, 100, 2774, 5785, 1509, 2452, 6366, 3270, - 7991, 2560, 8019, 9904, 251, 6517, 6214, 3419, 1023, - 9453, 641, 7273, 1656, 5351, 4640, 5117, 6185, 1841, - 2653, 7756, 2706, 5419, 3985, 8928, 5908, 8395, 9421, - 5401, 2577, 7369, 8901, 3379, 6155, 3350, 3982, 5664, - 2632, 8161, 9582, 4592, 162, 9591, 1297, 2536, 9311, - 3172, 5585, 8289, 7147, 6739, 4983, 1071, 2040, 1688, - 3636, 7012, 4562, 2335, 5647, 715, 3383, 3263, 4154, - 5053, 6148, 8770, 7782, 6127, 8164, 6802, 5781, 664, - 3307, 3627, 8545, 4737, 7093, 7554, 9661, 
1123, 6095, - 7857, 2265, 2364, 9252, 8564, 2821, 5037, 4976, 9852, - 4939, 4236, 323, 6914, 3029, 7213, 1359, 1808, 8226, - 8417, 9064, 9262, 5852, 3099, 5978, 9101, 5008, 9044, - 3805, 679, 4939, 2966, 9288, 8198, 4028, 31, 99, - 296, 8675, 6350, 1078, 4079, 9324, 818, 7786, 2367, - 5106, 7500, 3688, 495, 8439, 9765, 7521, 8162, 4283, - 1827, 1758, 498, 5591, 1635, 6647, 2322, 7481, 7741, - 8801, 1143, 8098, 1520, 1220, 6535, 4958, 3736, 3448, - 9104, 1436, 6505, 4589, 9260, 2728, 1235, 3582, 184, - 8130, 836, 5131, 4649, 3369, 3926, 5032, 4259, 4469, - 3719, 9447, 9115, 47, 3895, 3886, 9444, 2841, 4061, - 3017, 950, 3712, 7167, 5221, 790, 8084, 4463, 144, - 6564, 1773, 9290, 8835, 6735, 5270, 3631, 66, 2756, - 5023, 4534, 3959, 4551, 6473, 5945, 6848, 1690, 7777, - 6894, 3396, 9382, 1836, 441, 8618, 776, 5251, 8749, - 4321, 521, 6968, 7286, 596, 4655, 632, 7619, 1492, - 6316, 699, 7905, 1661, 4395, 9739, 104, 3521, 1753, - 6768, 1527, 3851, 9459, 3343, 5453, 7187, 8372, 997, - 5605, 6632, 6960, 7667, 7471, 2070, 4717, 1707, 1420, - 4300, 7588, 6313, 9723, 219, 5653, 7652, 2470, 9617, - 8449, 7892, 3278, 7246, 7417, 9017, 3709, 3870, 4523, - 9505, 4269, 6241, 9265, 7319, 8501, 753, 8354, 1912, - 1553, 8094, 1317, 5971, 2810, 414, 9472, 3324, 7338, - 9143, 2633, 8788, 9526, 766, 2702, 6774, 7114, 145, - 5175, 9354, 4708, 8663, 375, 3947, 9572, 8290, 5014, - 5394, 7415, 2379, 2168, 1593, 1872, 3501, 5911, 919, - 2321, 4308, 6674, 3273, 7641, 102, 3093, 4396, 9235, - 3771, 320, 755, 3459, 8281, 6984, 5238, 5946, 6352, - 7589, 5255, 4512, 2004, 1268, 5192, 5673, 4505, 9942, - 6596, 5939, 8561, 4262, 210, 1555, 4451, 645, 5242, - 2965, 3429, 8117, 6744, 8237, 9222, 6984, 2092, 2593, - 1178, 9712, 4151, 2676, 4438, 1667, 3002, 5084, 1702, - 2111, 6635, 9280, 2973, 5064, 874, 8288, 9477, 2791, - 1087, 7121, 1852, 1045, 9241, 8406, 813, 8275, 6074, - 8646, 4357, 5764, 661, 9267, 6343, 9809, 4190, 3021, - 5816, 9379, 3340, 6454, 1557, 356, 8328, 8537, 7080, - 7354, 1578, 666, 4817, 1570, 9849, 1827, 7697, 2475, - 6386, 9715, 5209, 9237, 6075, 9469, 583, 462, 3775, - 6440, 2999, 7916, 3754, 9198, 8174, 243, 3114, 2481, - 975, 9771, 5568, 9040, 5301, 3260, 4502, 2126, 5304, - 837, 9354, 258, 3627, 1232, 4503, 6332, 9854, 9142, - 4321, 3920, 3750, 5002, 1898, 5869, 3909, 3405, 600, - 8568, 3715, 3998, 3685, 5953, 2878, 8577, 7035, 2600, - 194, 9795, 6880, 2652, 7730, 3386, 3550, 179, 9688, - 5552, 8778, 6934, 3301, 9299, 7193, 8775, 1828, 7546, - 9311, 4434, 5261, 1742, 8110, 9147, 8767, 3915, 6212, - 5141, 5602, 4694, 7093, 8690, 2073, 7118, 8341, 6600, - 2953, 5097, 8404, 865, 8300, 3366, 8481, 7438, 3656, - 4341, 3597, 8718, 5002, 9403, 144, 8793, 6261, 1341, - 1279]), - values=tensor([6.3376e-01, 7.6730e-01, 7.2919e-02, 5.4427e-02, - 6.0546e-01, 6.0189e-01, 9.2261e-01, 5.7961e-01, - 4.9178e-01, 9.4390e-01, 2.8388e-02, 9.5132e-01, - 9.7425e-01, 5.9400e-01, 5.7570e-01, 9.4685e-01, - 1.1230e-03, 2.8396e-01, 8.0782e-01, 5.4140e-01, - 5.7368e-01, 3.1036e-02, 1.9431e-01, 9.2723e-01, - 8.0731e-01, 9.7470e-01, 6.8939e-01, 9.8405e-02, - 2.9168e-01, 2.5387e-01, 9.5437e-01, 4.1772e-01, - 4.7585e-02, 4.8136e-01, 1.5348e-01, 3.4622e-01, - 2.3877e-01, 2.8323e-03, 2.7343e-01, 1.3764e-01, - 1.2578e-01, 9.8729e-01, 5.9137e-01, 2.2799e-01, - 6.1987e-01, 2.1468e-01, 2.2450e-01, 2.0210e-01, - 9.2984e-01, 7.9165e-01, 2.7490e-01, 6.2165e-01, - 1.6123e-01, 4.0289e-01, 9.3432e-01, 3.9183e-01, - 1.5643e-02, 5.4120e-01, 9.4215e-02, 9.6242e-01, - 7.3975e-01, 5.8198e-01, 5.9396e-01, 8.2065e-01, - 2.0481e-01, 4.1419e-01, 
7.9918e-02, 5.7401e-01, - 9.2905e-01, 8.5287e-02, 5.1333e-01, 3.7557e-01, - 2.1503e-01, 3.9680e-01, 9.1255e-02, 6.5433e-02, - 5.2830e-02, 3.3823e-02, 2.3435e-01, 5.5829e-01, - 2.8861e-01, 9.7182e-01, 8.5050e-01, 1.7125e-01, - 3.8665e-01, 9.6698e-01, 1.7106e-01, 1.8865e-01, - 7.2944e-01, 1.4105e-01, 1.9717e-01, 2.3506e-01, - 9.7885e-01, 8.9037e-01, 8.7398e-01, 3.5326e-02, - 5.2638e-01, 2.1680e-01, 1.1436e-01, 9.4880e-01, - 6.7321e-01, 8.4246e-01, 6.5198e-01, 3.1991e-01, - 6.2040e-01, 9.1412e-01, 3.9267e-01, 1.5271e-01, - 9.2351e-01, 6.8775e-01, 2.2448e-01, 8.7632e-01, - 7.8738e-01, 2.0587e-01, 3.7930e-01, 9.9595e-01, - 6.7491e-01, 4.7656e-01, 1.0028e-01, 1.5781e-01, - 5.2293e-01, 9.8016e-01, 9.4054e-01, 7.3715e-01, - 3.4949e-01, 9.2644e-01, 6.3554e-01, 5.1357e-01, - 2.9100e-01, 9.3441e-01, 9.8280e-01, 6.4754e-01, - 2.7015e-01, 8.1047e-01, 8.2692e-01, 7.8373e-01, - 5.5185e-02, 5.3173e-01, 3.3446e-01, 4.6782e-01, - 2.8341e-01, 3.8462e-01, 5.5173e-01, 3.3233e-01, - 1.4466e-01, 9.9674e-02, 5.9887e-01, 3.0987e-01, - 2.0656e-01, 1.4293e-01, 6.2518e-01, 2.2170e-01, - 7.1340e-02, 1.4400e-01, 9.8693e-01, 5.7422e-01, - 2.8495e-01, 2.7480e-01, 3.2319e-01, 8.1503e-01, - 4.9779e-01, 3.7355e-01, 8.0157e-02, 5.6260e-01, - 4.5111e-01, 1.5335e-01, 6.0164e-02, 7.2363e-01, - 2.5203e-01, 7.8959e-01, 1.7399e-01, 6.4875e-01, - 8.3746e-01, 9.9659e-01, 3.2280e-01, 5.4780e-01, - 6.3405e-01, 2.5291e-01, 8.3069e-01, 3.2372e-01, - 9.6184e-01, 6.9730e-01, 6.4462e-02, 8.6435e-01, - 9.8089e-01, 4.8332e-02, 2.0838e-01, 6.1370e-01, - 4.3683e-01, 7.6069e-01, 5.9835e-01, 3.6330e-01, - 3.6132e-01, 4.5351e-01, 5.5867e-01, 8.3977e-01, - 7.7599e-01, 7.6807e-01, 6.1329e-01, 4.6591e-01, - 6.1334e-01, 5.1913e-01, 1.3149e-01, 5.9490e-01, - 5.7945e-02, 2.4761e-01, 1.3468e-01, 8.8630e-02, - 9.3435e-01, 5.7224e-01, 1.4595e-01, 4.8246e-01, - 8.2826e-01, 2.9221e-01, 2.3054e-01, 4.1104e-01, - 9.8005e-01, 9.5886e-01, 6.1801e-01, 9.8291e-01, - 8.7964e-01, 9.2761e-02, 1.8925e-03, 8.9489e-01, - 6.5450e-01, 3.7551e-02, 9.5895e-01, 2.1970e-01, - 5.7556e-01, 1.0274e-01, 6.8162e-01, 7.1993e-01, - 6.0126e-02, 9.6031e-01, 3.1287e-02, 8.9614e-01, - 6.1790e-01, 4.4599e-01, 9.1123e-01, 8.7231e-01, - 4.8551e-01, 8.6899e-01, 3.9527e-01, 6.0987e-01, - 2.4934e-01, 1.2417e-01, 2.4374e-01, 1.5547e-01, - 6.5609e-01, 8.3903e-01, 7.6651e-01, 2.5083e-01, - 2.8586e-01, 8.8381e-01, 7.9996e-01, 4.8057e-01, - 9.0034e-01, 1.7433e-01, 3.1113e-01, 8.5701e-01, - 1.7210e-01, 3.0793e-01, 1.3582e-01, 7.4721e-01, - 6.0021e-01, 4.6952e-01, 8.5957e-01, 9.8856e-01, - 1.1020e-01, 5.5178e-01, 3.5557e-01, 6.0157e-02, - 3.4176e-01, 5.8491e-01, 1.0830e-01, 4.4019e-02, - 5.0292e-01, 8.5915e-01, 9.1769e-01, 1.7949e-01, - 8.4804e-01, 3.1152e-01, 4.2578e-01, 3.4406e-01, - 3.7329e-01, 5.8707e-01, 7.7429e-01, 7.8815e-01, - 3.2826e-01, 5.3079e-01, 3.0037e-01, 2.6109e-01, - 7.4216e-01, 4.7964e-01, 6.5762e-01, 5.6755e-01, - 7.6597e-01, 7.2324e-01, 5.8358e-01, 2.7153e-01, - 3.5332e-01, 9.5097e-01, 7.4019e-02, 2.6906e-01, - 8.8423e-01, 1.3775e-02, 6.1518e-01, 9.2142e-01, - 8.1158e-01, 9.3027e-01, 2.8284e-01, 5.1808e-01, - 1.7706e-01, 5.8911e-01, 6.3706e-01, 1.3509e-01, - 1.8796e-01, 8.3253e-01, 5.2281e-01, 5.8225e-01, - 3.1314e-01, 7.5077e-01, 9.2693e-01, 8.8607e-01, - 1.0153e-01, 3.3197e-01, 2.4983e-01, 7.3707e-01, - 9.3066e-01, 5.5808e-01, 5.9277e-01, 5.6892e-01, - 8.4811e-01, 7.3016e-01, 3.2158e-01, 9.9875e-01, - 8.9341e-01, 6.5226e-01, 2.8142e-02, 3.8748e-01, - 9.9689e-01, 8.4440e-01, 1.3415e-01, 9.1314e-01, - 8.2779e-01, 8.8866e-01, 5.2771e-01, 1.6774e-01, - 1.0001e-01, 3.2182e-02, 
7.0240e-01, 4.9489e-01, - 7.3331e-01, 7.6800e-01, 3.9459e-01, 1.2124e-01, - 9.6981e-01, 2.4451e-02, 9.5581e-01, 5.6013e-01, - 8.6836e-01, 3.6457e-01, 6.6173e-01, 4.3691e-01, - 5.0752e-01, 8.9140e-01, 6.2725e-01, 5.1154e-01, - 3.8584e-01, 8.8067e-01, 8.0378e-01, 4.3013e-01, - 1.0393e-01, 3.7228e-01, 6.6680e-01, 6.1334e-01, - 1.5971e-01, 3.5533e-01, 1.8241e-01, 8.3902e-01, - 2.0770e-01, 1.5535e-02, 9.2843e-03, 2.3731e-01, - 3.3058e-01, 4.8184e-01, 1.8423e-01, 4.4116e-01, - 9.3384e-01, 5.0193e-01, 1.6866e-02, 3.8718e-01, - 7.5768e-01, 9.4654e-02, 5.3538e-02, 1.5641e-01, - 4.4035e-02, 9.9726e-01, 4.2594e-01, 5.1388e-01, - 9.1867e-01, 4.4814e-01, 9.5295e-02, 9.4138e-01, - 9.5373e-01, 2.1289e-01, 8.8704e-01, 6.7959e-01, - 5.8117e-01, 8.1876e-01, 3.1571e-01, 9.8089e-01, - 8.9793e-01, 7.2910e-01, 9.9414e-01, 1.1571e-01, - 3.1824e-01, 5.3603e-01, 3.2715e-01, 3.7889e-01, - 7.8753e-01, 2.6621e-01, 4.3075e-01, 4.7258e-01, - 6.2817e-01, 7.7791e-01, 4.8235e-01, 2.3192e-01, - 6.2132e-01, 9.1709e-01, 6.7642e-02, 9.3561e-01, - 5.6446e-01, 9.8651e-01, 5.1759e-01, 9.8335e-01, - 2.3037e-01, 1.9742e-01, 2.0549e-01, 1.4721e-01, - 2.3052e-02, 2.7569e-01, 4.0358e-02, 3.5939e-01, - 1.0842e-01, 7.4946e-01, 8.5014e-01, 5.5189e-01, - 8.4503e-01, 4.6246e-03, 1.6066e-01, 7.2416e-01, - 9.9480e-01, 1.8422e-01, 9.4831e-01, 3.3529e-01, - 9.4494e-01, 3.4365e-01, 2.6628e-01, 5.3290e-01, - 9.3233e-02, 7.2194e-01, 7.6637e-01, 8.0021e-02, - 7.3160e-01, 8.3532e-01, 9.7215e-01, 4.4904e-01, - 7.9254e-02, 8.7417e-01, 9.8399e-01, 8.1783e-01, - 1.4564e-01, 9.6394e-01, 6.3995e-01, 7.3142e-01, - 1.1885e-01, 1.1676e-01, 4.8026e-01, 6.7704e-01, - 2.6034e-01, 6.0302e-01, 6.0501e-01, 5.6108e-01, - 6.2763e-01, 9.5279e-01, 6.6563e-01, 5.9835e-01, - 9.4274e-01, 4.2874e-01, 3.1187e-01, 1.7232e-02, - 9.2523e-01, 7.2337e-01, 2.3001e-01, 4.8959e-01, - 7.4727e-01, 9.9497e-01, 1.6681e-01, 3.5589e-01, - 1.4686e-01, 8.6831e-01, 3.7435e-01, 4.6312e-01, - 7.3891e-01, 3.6720e-01, 8.0284e-01, 7.7236e-01, - 6.5776e-02, 5.3563e-01, 2.2124e-01, 4.9381e-01, - 2.6533e-01, 2.8308e-01, 4.5876e-01, 9.9927e-01, - 5.7074e-01, 4.9419e-02, 5.5013e-02, 5.7653e-01, - 2.9803e-01, 1.5908e-01, 5.9323e-01, 1.2686e-01, - 3.1267e-01, 5.6400e-01, 8.9399e-02, 9.9575e-01, - 4.0021e-01, 8.5003e-01, 3.5400e-01, 9.8075e-01, - 9.3946e-01, 2.1345e-01, 2.9200e-01, 2.7106e-01, - 7.8208e-01, 1.2337e-01, 7.6499e-01, 8.8815e-01, - 9.4963e-01, 6.5762e-02, 1.2987e-01, 8.8306e-02, - 2.9500e-01, 3.0178e-01, 1.7011e-01, 2.0707e-01, - 1.6883e-01, 5.3873e-01, 5.3918e-01, 2.6079e-01, - 2.0780e-01, 1.8459e-01, 6.2376e-01, 6.7532e-01, - 3.6186e-01, 4.6222e-01, 4.5177e-01, 3.0205e-01, - 7.3128e-01, 5.9687e-01, 2.2776e-01, 9.4918e-01, - 9.3331e-01, 5.5633e-01, 6.6717e-01, 7.7574e-01, - 2.2907e-02, 5.4869e-01, 8.5278e-01, 7.7307e-01, - 3.3436e-01, 3.7913e-01, 5.0240e-01, 7.5275e-01, - 5.3813e-01, 8.2946e-02, 3.7859e-01, 7.9385e-01, - 9.1423e-01, 2.2822e-01, 4.3641e-01, 3.0321e-01, - 6.1523e-01, 9.1439e-01, 2.7838e-01, 6.8154e-01, - 6.0011e-01, 3.7820e-01, 1.9577e-01, 2.8305e-02, - 1.5139e-01, 2.8758e-01, 5.7909e-01, 7.1980e-01, - 5.5532e-01, 8.9573e-01, 2.4678e-01, 4.3668e-01, - 9.2720e-01, 5.7365e-01, 5.9780e-01, 8.8441e-01, - 2.1912e-01, 2.6757e-01, 8.0682e-01, 6.0861e-01, - 1.9053e-02, 6.9830e-01, 7.0238e-01, 1.8043e-03, - 6.9298e-01, 3.2843e-01, 4.4586e-01, 1.7567e-02, - 6.2070e-01, 8.5091e-01, 8.3733e-01, 9.4375e-02, - 1.1168e-01, 7.8741e-01, 7.8798e-01, 3.2198e-01, - 5.5968e-01, 9.5879e-01, 4.7578e-01, 3.4492e-02, - 1.8563e-01, 1.9669e-01, 5.0613e-01, 3.5203e-02, - 2.0767e-01, 4.9352e-01, 
7.2210e-01, 2.1544e-02, - 9.4704e-01, 7.1352e-01, 4.4836e-01, 7.2969e-01, - 7.7148e-01, 1.2084e-01, 1.7120e-02, 8.4079e-01, - 6.1026e-01, 3.7129e-01, 2.4231e-01, 1.0633e-01, - 7.3717e-01, 9.7235e-02, 5.1810e-01, 5.1054e-01, - 6.1998e-01, 4.0900e-01, 6.3778e-01, 6.8169e-01, - 2.6785e-01, 9.8809e-01, 4.0731e-01, 4.2571e-01, - 4.7580e-01, 4.7162e-01, 3.2020e-01, 3.8089e-01, - 9.7933e-01, 9.2454e-01, 1.2763e-01, 7.7816e-01, - 6.8461e-01, 5.9559e-01, 9.8970e-01, 6.8703e-01, - 3.9614e-01, 6.8764e-01, 3.0704e-01, 6.3408e-01, - 7.2067e-01, 7.3043e-01, 5.8166e-01, 8.5199e-01, - 9.9214e-01, 3.2601e-01, 2.8523e-01, 5.2466e-01, - 1.6880e-01, 7.0266e-01, 8.1714e-02, 9.9022e-01, - 5.1510e-01, 2.3188e-01, 7.0673e-01, 9.3670e-01, - 6.8927e-01, 7.4791e-01, 4.1159e-01, 9.9218e-01, - 3.9861e-01, 5.8315e-01, 3.8248e-01, 9.6070e-01, - 5.2728e-01, 6.8154e-01, 9.6611e-01, 2.6567e-01, - 8.7328e-01, 1.6906e-01, 3.1671e-01, 5.9926e-01, - 9.2459e-01, 4.7512e-01, 5.5365e-01, 5.5367e-01, - 9.7200e-01, 7.8079e-01, 5.0291e-01, 9.2394e-02, - 2.9389e-01, 8.6800e-01, 3.2031e-01, 6.3648e-01, - 9.9521e-01, 1.0258e-01, 5.1779e-01, 6.8992e-01, - 2.1659e-01, 1.7651e-01, 2.3642e-02, 8.4533e-01, - 8.5897e-01, 7.7948e-01, 3.0726e-01, 4.5709e-02, - 5.5568e-01, 2.7976e-01, 6.9800e-01, 2.2745e-01, - 2.7651e-01, 9.0869e-01, 7.2947e-01, 7.3732e-01, - 8.3152e-01, 7.2021e-01, 3.0614e-01, 8.7317e-01, - 5.3468e-01, 8.3123e-01, 3.4375e-01, 4.0532e-02, - 8.3949e-02, 5.0905e-04, 4.0538e-02, 1.2199e-01, - 6.5995e-01, 6.7796e-02, 3.2800e-01, 8.6735e-01, - 4.7209e-01, 9.5358e-02, 1.6026e-01, 7.7149e-01, - 6.2073e-02, 4.7393e-01, 8.5895e-01, 9.7965e-01, - 8.6001e-01, 4.9042e-01, 7.9295e-01, 7.8847e-01, - 5.5936e-01, 3.4604e-01, 6.7556e-01, 6.1583e-01, - 6.4772e-01, 4.0602e-01, 2.8032e-01, 3.9837e-01, - 4.8167e-01, 9.2784e-01, 3.1825e-01, 6.8211e-01, - 7.1275e-01, 6.8629e-01, 5.6396e-01, 8.5622e-01, - 1.9037e-01, 1.9047e-01, 3.2621e-01, 2.3798e-01, - 1.1718e-02, 7.3091e-01, 4.3965e-01, 5.5108e-01, - 5.9347e-01, 3.6512e-02, 8.1292e-01, 6.9857e-01, - 3.0565e-01, 2.6693e-02, 4.0556e-01, 5.1861e-01, - 6.9724e-01, 3.5374e-01, 7.8549e-01, 9.2104e-01, - 5.4024e-01, 6.2665e-01, 5.9591e-01, 3.4322e-01, - 8.2211e-02, 8.2777e-01, 5.0700e-01, 1.2595e-01, - 6.6960e-01, 3.1515e-01, 1.4555e-02, 3.8011e-01, - 8.1554e-01, 7.0168e-01, 7.1629e-01, 7.0360e-01, - 5.8561e-01, 6.1912e-01, 7.9669e-01, 8.7766e-01, - 8.2578e-01, 7.0533e-01, 4.9176e-01, 2.1029e-01, - 7.9780e-01, 7.8577e-02, 6.7550e-01, 9.6215e-02, - 3.7034e-01, 8.4600e-01, 2.1426e-01, 6.8059e-01, - 8.9566e-01, 2.0948e-01, 2.5723e-01, 2.1477e-01, - 7.0287e-01, 8.8856e-01, 6.8210e-01, 2.1374e-01, - 5.9763e-01, 5.0325e-01, 9.7355e-01, 2.5234e-01, - 1.4671e-01, 7.4961e-01, 5.0105e-01, 4.1481e-01, - 3.3586e-01, 3.2885e-01, 4.4906e-01, 9.4966e-01, - 9.7029e-01, 3.6201e-01, 9.9074e-01, 5.6940e-01, - 5.3932e-01, 5.4720e-02, 8.8651e-01, 8.6641e-01, - 6.7084e-01, 6.3171e-01, 1.7082e-02, 9.7000e-01, - 6.0249e-01, 8.5227e-02, 5.1565e-01, 2.8812e-01, - 7.6133e-01, 8.8965e-01, 4.3881e-01, 8.9089e-02, - 2.0705e-02, 6.0003e-01, 6.0058e-01, 2.3583e-01, - 7.0114e-01, 4.6051e-01, 9.3395e-01, 9.1531e-01, - 9.8035e-01, 8.6110e-01, 5.0684e-01, 1.4922e-02, - 3.5417e-01, 6.3192e-01, 6.2847e-01, 2.1922e-01, - 5.2679e-01, 5.0140e-01, 4.1193e-01, 9.3722e-01, - 7.2416e-01, 8.2701e-01, 5.8773e-01, 4.5892e-01, - 8.6080e-01, 7.0796e-01, 7.3090e-01, 2.3398e-01, - 8.2336e-01, 8.1931e-02, 5.6648e-01, 9.2281e-01, - 5.8624e-01, 8.3555e-01, 9.2999e-01, 8.9524e-01, - 9.7694e-01, 1.0057e-01, 9.1836e-01, 1.8716e-01, - 9.1811e-01, 2.5747e-02, 
5.2811e-01, 5.9349e-01, - 3.1233e-01, 5.9616e-01, 9.2941e-01, 9.2230e-01, - 7.5079e-01, 2.7152e-01, 6.8334e-01, 1.8560e-01, - 1.9301e-01, 7.6648e-01, 7.1679e-01, 9.8760e-01, - 8.5633e-01, 5.0996e-01, 8.6064e-01, 6.3841e-01, - 6.3329e-01, 2.8584e-02, 4.5359e-01, 6.6242e-01, - 6.9051e-01, 1.3944e-02, 2.4069e-01, 9.0102e-01, - 4.0050e-01, 1.3846e-01, 6.7660e-02, 9.8925e-01, - 6.0724e-01, 2.5858e-01, 1.3118e-01, 6.4780e-02, - 5.2943e-03, 8.5863e-01, 1.0742e-01, 8.2977e-01, - 2.6180e-01, 4.9962e-03, 3.6669e-01, 3.7121e-01, - 9.4605e-01, 9.0263e-01, 3.6751e-01, 2.8743e-01, - 5.2275e-01, 9.3565e-01, 3.4542e-02, 8.3878e-01, - 9.8396e-01, 5.7945e-01, 8.3609e-01, 2.4747e-01, - 5.0297e-01, 6.4252e-01, 4.6862e-01, 1.5146e-01, - 6.8402e-01, 7.2437e-01, 2.6435e-02, 6.6999e-01, - 8.2226e-01, 8.3864e-01, 4.0283e-01, 7.2857e-01, - 8.9768e-02, 8.3236e-01, 5.2385e-01, 2.0285e-01, - 9.6598e-01, 2.8956e-01, 6.8980e-01, 8.3998e-01, - 2.5672e-01, 6.8220e-01, 5.4447e-01, 9.8634e-01, - 4.3737e-01, 4.8044e-02, 6.7611e-01, 9.4603e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7103, 0.5485, 0.7256, ..., 0.6866, 0.8523, 0.5088]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 10.999524116516113 seconds - -[39.86, 39.2, 39.16, 38.85, 39.2, 39.62, 38.88, 38.71, 38.95, 38.67] -[96.42] -13.729055643081665 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 375977, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.999524116516113, 'TIME_S_1KI': 0.029255843087518954, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1323.755545105934, 'W': 96.41999999999999} -[39.86, 39.2, 39.16, 38.85, 39.2, 39.62, 38.88, 38.71, 38.95, 38.67, 40.17, 39.41, 39.19, 38.69, 38.88, 39.53, 40.39, 39.3, 39.0, 38.88] -705.75 -35.2875 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 375977, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.999524116516113, 'TIME_S_1KI': 0.029255843087518954, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1323.755545105934, 'W': 96.41999999999999, 'J_1KI': 3.5208418203930933, 'W_1KI': 0.25645185742744897, 'W_D': 61.132499999999986, 'J_D': 839.2914941006896, 'W_D_1KI': 0.16259638222550846, 'J_D_1KI': 0.0004324636406628822} +[39.64, 44.74, 39.71, 38.92, 39.06, 38.91, 39.09, 38.87, 39.14, 39.48] +[97.06] +13.222800016403198 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 355068, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.454679489135742, 'TIME_S_1KI': 0.029444161369472165, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1283.4049695920944, 'W': 97.06} +[39.64, 44.74, 39.71, 38.92, 39.06, 38.91, 39.09, 38.87, 39.14, 39.48, 40.2, 40.78, 38.95, 38.87, 38.88, 38.91, 39.42, 39.12, 39.01, 39.07] +711.575 +35.57875 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 355068, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.454679489135742, 'TIME_S_1KI': 0.029444161369472165, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
1283.4049695920944, 'W': 97.06, 'J_1KI': 3.614532905224054, 'W_1KI': 0.27335608953777873, 'W_D': 61.48125, 'J_D': 812.9542735084892, 'W_D_1KI': 0.17315345229646154, 'J_D_1KI': 0.0004876627921875853} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json index 7e41519..98c0109 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21375, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.636817216873169, "TIME_S_1KI": 0.4976288756431892, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2053.7217113614083, "W": 152.01000000000002, "J_1KI": 96.08054789994893, "W_1KI": 7.111578947368422, "W_D": 116.32275000000001, "J_D": 1571.5713255724313, "W_D_1KI": 5.442000000000001, "J_D_1KI": 0.2545964912280702} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21497, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.492619514465332, "TIME_S_1KI": 0.488096921173435, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2075.744820713997, "W": 155.25, "J_1KI": 96.55974418356035, "W_1KI": 7.221937944829511, "W_D": 119.6075, "J_D": 1599.1925838553905, "W_D_1KI": 5.5639158952411965, "J_D_1KI": 0.25882290064851826} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output index e90e199..4dbd4ff 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,57 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5442898273468018} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5443899631500244} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
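The bracketed lines that precede each final JSON record are the raw power trace: a 10-sample idle wattage list captured during the 10 s BASELINE_DELAY_S warm-up, the average wattage under load in a one-element list, the wall-clock duration of the measured run, and then a 20-sample idle list with its aggregate (711.575) and per-sample baseline (35.57875 W). Every derived field in the record follows from these by simple arithmetic; the sketch below reproduces them for the 10000x10000 record just above. It is a reconstruction from the logged numbers, not from spmv.py/batch.py themselves: the helper name derive_metrics is invented, the exact discounting behind the 711.575 aggregate is not recoverable from the rounded samples (their plain sum is ~790.8), and the logged J_D_1KI does not equal J_D/(ITERATIONS/1000) — it matches W_D_1KI divided by that factor again — so it is left out.

    # Hedged reconstruction of the derived benchmark fields; verified against the
    # Epyc 7313P synthetic 10000x10000 record above.
    def derive_metrics(w_load, duration_s, w_idle, iterations, time_s):
        per_1ki = iterations / 1000.0          # metrics are reported per 1000 iterations
        j = w_load * duration_s                # 97.06 W * 13.2228 s ~= 1283.405 J
        w_d = w_load - w_idle                  # 97.06 - 35.57875 = 61.48125 W dynamic power
        j_d = w_d * duration_s                 # 61.48125 * 13.2228 ~= 812.954 J dynamic energy
        return {
            "TIME_S_1KI": time_s / per_1ki,    # ~= 0.0294442
            "J": j, "W": w_load,
            "J_1KI": j / per_1ki,              # ~= 3.61453
            "W_1KI": w_load / per_1ki,         # ~= 0.273356
            "W_D": w_d, "J_D": j_d,
            "W_D_1KI": w_d / per_1ki,          # ~= 0.173153
        }

    print(derive_metrics(97.06, 13.222800016403198, 35.57875, 355068, 10.454679489135742))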
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 11, ..., 2499988, +tensor(crow_indices=tensor([ 0, 7, 13, ..., 2499984, + 2499993, 2500000]), + col_indices=tensor([ 29642, 73796, 205405, ..., 362365, 387524, + 440531]), + values=tensor([0.6565, 0.4150, 0.8341, ..., 0.7997, 0.8212, 0.8706]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3188, 0.4041, 0.2486, ..., 0.5189, 0.6175, 0.2446]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 0.5443899631500244 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19287', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.420541286468506} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 12, ..., 2499994, + 2499998, 2500000]), + col_indices=tensor([131466, 192610, 285983, ..., 398857, 7127, + 216070]), + values=tensor([0.3766, 0.1095, 0.0818, ..., 0.7673, 0.9998, 0.7256]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6672, 0.9862, 0.6354, ..., 0.4943, 0.9100, 0.2548]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 9.420541286468506 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21497', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.492619514465332} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 12, ..., 2499992, 2499996, 2500000]), - col_indices=tensor([ 37839, 98870, 148404, ..., 161688, 445826, - 487462]), - values=tensor([0.2708, 0.4230, 0.0396, ..., 0.5012, 0.9237, 0.4084]), + col_indices=tensor([124091, 157764, 160136, ..., 120950, 171105, + 490445]), + values=tensor([0.1739, 0.8424, 0.0842, ..., 0.2028, 0.9911, 0.7243]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6604, 0.4578, 0.9008, ..., 0.1692, 0.6250, 0.2013]) +tensor([0.9645, 0.6044, 0.9036, ..., 0.9779, 0.7664, 0.8298]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +59,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 0.5442898273468018 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19291', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.475887298583984} +Time: 10.492619514465332 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 11, ..., 2499990, - 2499994, 2500000]), - col_indices=tensor([ 2997, 16168, 106256, ..., 284595, 359619, - 400100]), - values=tensor([0.5956, 0.5098, 0.7367, ..., 0.1293, 0.8182, 0.3844]), +tensor(crow_indices=tensor([ 0, 10, 12, ..., 2499992, + 2499996, 2500000]), + col_indices=tensor([124091, 157764, 160136, ..., 120950, 171105, + 490445]), + values=tensor([0.1739, 0.8424, 0.0842, ..., 0.2028, 0.9911, 0.7243]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4741, 0.3124, 0.4103, ..., 0.8230, 0.7925, 0.1055]) +tensor([0.9645, 0.6044, 0.9036, ..., 0.9779, 0.7664, 0.8298]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,52 +77,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 9.475887298583984 seconds +Time: 10.492619514465332 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21375', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.636817216873169} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499987, - 2499990, 2500000]), - col_indices=tensor([ 69634, 109368, 119504, ..., 397654, 413765, - 480494]), - values=tensor([0.1977, 0.6347, 0.9236, ..., 0.5996, 0.0558, 0.7507]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.2633, 0.4244, 0.4182, ..., 0.0717, 0.3446, 0.9616]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 10.636817216873169 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499987, - 2499990, 2500000]), - col_indices=tensor([ 69634, 109368, 119504, ..., 397654, 413765, - 480494]), - values=tensor([0.1977, 0.6347, 0.9236, ..., 0.5996, 0.0558, 0.7507]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.2633, 0.4244, 0.4182, ..., 0.0717, 0.3446, 0.9616]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 10.636817216873169 seconds - -[39.92, 39.18, 39.71, 40.61, 40.15, 39.79, 39.41, 39.75, 39.22, 39.64] -[152.01] -13.510438203811646 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21375, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.636817216873169, 'TIME_S_1KI': 0.4976288756431892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2053.7217113614083, 'W': 152.01000000000002} -[39.92, 39.18, 39.71, 40.61, 40.15, 39.79, 39.41, 39.75, 39.22, 39.64, 39.92, 39.78, 39.31, 39.34, 39.44, 40.15, 39.73, 39.14, 39.7, 39.19] -713.7450000000001 -35.687250000000006 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21375, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.636817216873169, 'TIME_S_1KI': 0.4976288756431892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2053.7217113614083, 'W': 152.01000000000002, 'J_1KI': 96.08054789994893, 'W_1KI': 7.111578947368422, 'W_D': 116.32275000000001, 'J_D': 1571.5713255724313, 'W_D_1KI': 5.442000000000001, 'J_D_1KI': 0.2545964912280702} +[41.35, 39.34, 39.96, 39.31, 39.6, 39.93, 39.84, 39.39, 39.3, 39.31] +[155.25] +13.370337009429932 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.492619514465332, 'TIME_S_1KI': 0.488096921173435, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2075.744820713997, 'W': 155.25} +[41.35, 39.34, 39.96, 39.31, 39.6, 39.93, 39.84, 39.39, 39.3, 39.31, 40.05, 40.08, 39.33, 39.83, 39.32, 39.88, 39.28, 39.25, 39.29, 39.13] +712.85 +35.6425 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 
500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.492619514465332, 'TIME_S_1KI': 0.488096921173435, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2075.744820713997, 'W': 155.25, 'J_1KI': 96.55974418356035, 'W_1KI': 7.221937944829511, 'W_D': 119.6075, 'J_D': 1599.1925838553905, 'W_D_1KI': 5.5639158952411965, 'J_D_1KI': 0.25882290064851826} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json index 7a8ab5e..c71ff3b 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 88993, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.460110664367676, "TIME_S_1KI": 0.11753857791475371, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1554.5358743476868, "W": 116.24, "J_1KI": 17.468069110465844, "W_1KI": 1.3061701482139043, "W_D": 80.32, "J_D": 1074.1596819305419, "W_D_1KI": 0.9025428966323192, "J_D_1KI": 0.010141729086920537} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 91834, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.82576298713684, "TIME_S_1KI": 0.11788404062914433, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1647.9860616016388, "W": 116.97, "J_1KI": 17.945271485524305, "W_1KI": 1.273711261624235, "W_D": 80.83175, "J_D": 1138.8355760867596, "W_D_1KI": 0.8801941546703835, "J_D_1KI": 0.009584621759592129} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output index 82a28ba..504044c 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.1613328456878662} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.14647722244262695} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 9, ..., 249987, 249990, +tensor(crow_indices=tensor([ 0, 6, 8, ..., 249988, 249995, 250000]), - col_indices=tensor([ 2831, 11435, 18332, ..., 36257, 39398, 40541]), - values=tensor([0.1158, 0.5239, 0.2299, ..., 0.2166, 0.7808, 0.4412]), + col_indices=tensor([ 544, 6056, 19594, ..., 16208, 31107, 37035]), + values=tensor([0.8576, 0.5005, 0.2810, ..., 0.0063, 0.7171, 0.8258]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7586, 0.4736, 0.7326, ..., 0.5631, 0.8162, 0.2413]) +tensor([0.4318, 0.7107, 0.2576, ..., 0.8496, 0.3705, 0.3608]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.1613328456878662 seconds +Time: 0.14647722244262695 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65082', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.079791784286499} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '71683', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.233005046844482} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 249989, 249995, +tensor(crow_indices=tensor([ 0, 7, 10, ..., 249988, 249994, 250000]), - col_indices=tensor([ 9506, 10457, 11174, ..., 14178, 16522, 25750]), - values=tensor([0.5729, 0.5279, 0.3744, ..., 0.1961, 0.5511, 0.6709]), + col_indices=tensor([ 4979, 12449, 23825, ..., 32585, 40358, 48594]), + values=tensor([0.7825, 0.8569, 0.5029, ..., 0.3250, 0.4106, 0.3303]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0404, 0.4787, 0.7701, ..., 0.8815, 0.0868, 0.4305]) +tensor([0.8033, 0.4755, 0.5204, ..., 0.8611, 0.9528, 0.0172]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 8.079791784286499 seconds +Time: 9.233005046844482 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '84576', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.978835582733154} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '81519', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.805182695388794} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 249990, 249996, +tensor(crow_indices=tensor([ 0, 6, 11, ..., 249983, 249992, 250000]), - col_indices=tensor([26217, 28400, 13678, ..., 15637, 35417, 48424]), - values=tensor([0.3837, 0.9571, 0.9616, ..., 0.3970, 0.1960, 0.8766]), + col_indices=tensor([ 7422, 17911, 31055, ..., 30707, 32021, 38558]), + values=tensor([0.7718, 0.8036, 0.8293, ..., 0.2159, 0.0251, 0.0647]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6737, 0.6555, 0.0878, ..., 0.0726, 0.6482, 0.1469]) +tensor([0.3183, 0.3041, 0.1046, ..., 0.2603, 0.8118, 0.2097]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,19 +56,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 9.978835582733154 seconds +Time: 9.805182695388794 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '88993', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.460110664367676} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '87295', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.980920553207397} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 12, ..., 249992, 249993, +tensor(crow_indices=tensor([ 0, 3, 5, ..., 249989, 249993, 250000]), - col_indices=tensor([ 7470, 20811, 24121, ..., 36968, 38743, 40607]), - values=tensor([0.2685, 0.7271, 0.6618, ..., 0.0403, 0.7886, 0.4035]), + col_indices=tensor([19530, 21432, 40127, ..., 33319, 45642, 48654]), + values=tensor([0.8438, 0.0330, 0.2387, ..., 0.6115, 0.5796, 0.5067]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0454, 0.0390, 0.3317, ..., 0.3195, 0.9524, 0.5758]) +tensor([0.1992, 0.5617, 0.3460, ..., 0.4818, 0.9372, 0.6597]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -76,16 +76,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.460110664367676 seconds +Time: 9.980920553207397 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '91834', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.82576298713684} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 12, ..., 249992, 249993, +tensor(crow_indices=tensor([ 0, 7, 12, ..., 249987, 249995, 250000]), - col_indices=tensor([ 7470, 20811, 24121, ..., 36968, 38743, 40607]), - values=tensor([0.2685, 0.7271, 0.6618, ..., 0.0403, 0.7886, 0.4035]), + col_indices=tensor([ 2714, 5631, 18387, ..., 39061, 48792, 49070]), + values=tensor([0.1970, 0.9435, 0.9859, ..., 0.7944, 0.6863, 0.0587]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0454, 0.0390, 0.3317, ..., 0.3195, 0.9524, 0.5758]) +tensor([0.5128, 0.8861, 0.8900, ..., 0.3721, 0.4809, 0.7353]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -93,13 +96,30 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.460110664367676 seconds +Time: 10.82576298713684 seconds -[40.38, 39.72, 39.57, 39.21, 40.98, 39.68, 39.65, 39.04, 39.23, 43.6] -[116.24] -13.373502016067505 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 88993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.460110664367676, 'TIME_S_1KI': 0.11753857791475371, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1554.5358743476868, 'W': 116.24} -[40.38, 39.72, 39.57, 39.21, 40.98, 39.68, 39.65, 39.04, 39.23, 43.6, 39.84, 40.48, 39.37, 39.22, 39.24, 38.98, 44.18, 39.09, 39.18, 39.34] -718.4 -35.92 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 88993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.460110664367676, 'TIME_S_1KI': 0.11753857791475371, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1554.5358743476868, 'W': 116.24, 'J_1KI': 17.468069110465844, 'W_1KI': 1.3061701482139043, 'W_D': 80.32, 'J_D': 1074.1596819305419, 'W_D_1KI': 0.9025428966323192, 'J_D_1KI': 0.010141729086920537} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 12, ..., 249987, 249995, + 250000]), + col_indices=tensor([ 2714, 5631, 18387, ..., 39061, 48792, 49070]), + values=tensor([0.1970, 0.9435, 0.9859, ..., 0.7944, 0.6863, 0.0587]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5128, 0.8861, 0.8900, ..., 0.3721, 0.4809, 0.7353]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.82576298713684 seconds + +[40.37, 39.3, 39.3, 39.19, 39.7, 39.15, 40.14, 44.33, 39.43, 39.81] +[116.97] +14.088963508605957 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91834, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.82576298713684, 'TIME_S_1KI': 0.11788404062914433, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1647.9860616016388, 'W': 116.97} +[40.37, 39.3, 39.3, 39.19, 39.7, 39.15, 40.14, 44.33, 39.43, 39.81, 40.26, 39.09, 41.97, 44.11, 39.87, 39.01, 40.55, 38.99, 38.97, 38.89] +722.7650000000001 +36.138250000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91834, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.82576298713684, 'TIME_S_1KI': 0.11788404062914433, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1647.9860616016388, 'W': 116.97, 'J_1KI': 17.945271485524305, 'W_1KI': 1.273711261624235, 'W_D': 80.83175, 'J_D': 1138.8355760867596, 'W_D_1KI': 0.8801941546703835, 'J_D_1KI': 0.009584621759592129} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json index 19e233d..3676d30 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 46287, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.077528476715088, "TIME_S_1KI": 0.23932267108940064, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2013.3453536748884, "W": 148.1, "J_1KI": 43.49699383573981, "W_1KI": 3.1996024801780196, "W_D": 112.52425, "J_D": 1529.7108434385657, "W_D_1KI": 2.4310119471989973, "J_D_1KI": 0.052520404156652996} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 46775, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.762548208236694, "TIME_S_1KI": 0.2300918911434889, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2033.0924378275872, "W": 149.27, "J_1KI": 43.46536478519695, "W_1KI": 3.1912346338856232, "W_D": 113.87475, "J_D": 1551.0008245763183, "W_D_1KI": 2.434521646178514, "J_D_1KI": 0.052047496444222636} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output index a9b5d6b..3112aac 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2967829704284668} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.29659008979797363} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 108, ..., 2499894, - 2499951, 2500000]), - col_indices=tensor([ 368, 1693, 4088, ..., 44885, 46596, 47442]), - values=tensor([0.5982, 0.3592, 0.7042, ..., 0.6155, 0.2314, 0.2925]), +tensor(crow_indices=tensor([ 0, 46, 83, ..., 2499888, + 2499946, 2500000]), + col_indices=tensor([ 2168, 2264, 3614, ..., 46868, 47216, 48811]), + values=tensor([0.2788, 0.0512, 0.3475, ..., 0.9281, 0.1898, 0.0144]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7227, 0.5816, 0.4934, ..., 0.3583, 0.6407, 0.9822]) +tensor([0.5080, 0.1629, 0.0847, ..., 0.6599, 0.4582, 0.2341]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.2967829704284668 seconds +Time: 0.29659008979797363 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35379', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.025555610656738} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35402', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.946921348571777} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 96, ..., 2499914, - 2499959, 2500000]), - col_indices=tensor([ 123, 723, 909, ..., 47588, 48779, 49819]), - values=tensor([0.6654, 0.3505, 0.8901, ..., 0.8476, 0.5107, 0.1185]), +tensor(crow_indices=tensor([ 0, 54, 117, ..., 2499905, + 2499953, 2500000]), + col_indices=tensor([ 1300, 1442, 2491, ..., 47415, 49147, 49910]), + values=tensor([0.1149, 0.9707, 0.0968, ..., 0.7933, 0.6392, 0.9343]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6658, 0.6242, 0.4020, ..., 0.5009, 0.1451, 0.6481]) +tensor([0.2903, 0.7408, 0.0968, ..., 0.3344, 0.5691, 0.3821]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 8.025555610656738 seconds +Time: 7.946921348571777 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '46287', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.077528476715088} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '46775', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.762548208236694} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 94, ..., 2499903, - 2499951, 2500000]), - col_indices=tensor([ 506, 1320, 4404, ..., 49283, 49651, 49966]), - values=tensor([0.0094, 0.0130, 0.0811, ..., 0.5846, 0.7695, 0.1584]), +tensor(crow_indices=tensor([ 0, 65, 117, ..., 2499903, + 2499954, 2500000]), + col_indices=tensor([ 2232, 2981, 3015, ..., 49447, 49836, 49877]), + values=tensor([0.3281, 0.9452, 0.1004, ..., 0.4282, 0.2346, 0.1167]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8952, 0.2999, 0.6108, ..., 0.3758, 0.9662, 0.9596]) +tensor([0.5238, 0.6081, 0.9163, ..., 0.2866, 0.2457, 0.9117]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 11.077528476715088 seconds +Time: 10.762548208236694 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 94, ..., 2499903, - 2499951, 2500000]), - col_indices=tensor([ 506, 1320, 4404, ..., 49283, 49651, 49966]), - values=tensor([0.0094, 0.0130, 0.0811, ..., 0.5846, 0.7695, 0.1584]), +tensor(crow_indices=tensor([ 0, 65, 117, ..., 2499903, + 2499954, 2500000]), + col_indices=tensor([ 2232, 2981, 3015, ..., 49447, 49836, 49877]), + values=tensor([0.3281, 0.9452, 0.1004, ..., 0.4282, 0.2346, 0.1167]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8952, 0.2999, 0.6108, ..., 0.3758, 0.9662, 0.9596]) +tensor([0.5238, 0.6081, 0.9163, ..., 0.2866, 0.2457, 0.9117]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 11.077528476715088 seconds +Time: 10.762548208236694 seconds -[40.89, 39.29, 39.33, 39.28, 39.26, 39.19, 39.19, 40.42, 39.41, 39.34] -[148.1] -13.594499349594116 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46287, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.077528476715088, 'TIME_S_1KI': 0.23932267108940064, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2013.3453536748884, 'W': 148.1} -[40.89, 39.29, 39.33, 39.28, 39.26, 39.19, 39.19, 40.42, 39.41, 39.34, 40.01, 40.0, 39.66, 39.57, 39.25, 39.46, 39.33, 39.3, 39.25, 40.41] -711.5149999999999 -35.57574999999999 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46287, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.077528476715088, 'TIME_S_1KI': 0.23932267108940064, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2013.3453536748884, 'W': 148.1, 'J_1KI': 43.49699383573981, 'W_1KI': 3.1996024801780196, 'W_D': 112.52425, 'J_D': 1529.7108434385657, 'W_D_1KI': 2.4310119471989973, 'J_D_1KI': 0.052520404156652996} +[41.04, 39.15, 39.26, 39.11, 39.17, 39.29, 39.32, 39.24, 39.56, 39.01] +[149.27] +13.620234727859497 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.762548208236694, 'TIME_S_1KI': 0.2300918911434889, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2033.0924378275872, 'W': 149.27} +[41.04, 39.15, 39.26, 39.11, 39.17, 39.29, 39.32, 39.24, 39.56, 39.01, 39.97, 39.18, 39.31, 39.08, 39.46, 39.32, 39.13, 39.45, 39.15, 39.43] +707.905 +35.39525 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.762548208236694, 'TIME_S_1KI': 0.2300918911434889, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2033.0924378275872, 'W': 149.27, 'J_1KI': 43.46536478519695, 'W_1KI': 3.1912346338856232, 'W_D': 113.87475, 'J_D': 1551.0008245763183, 'W_D_1KI': 2.434521646178514, 'J_D_1KI': 0.052047496444222636} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json index 1e1cae0..efee96e 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 126164, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.496079683303833, "TIME_S_1KI": 0.08319393553869434, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1339.4068178844452, "W": 103.41, "J_1KI": 10.616394675854009, "W_1KI": 0.8196474430106845, "W_D": 67.667, "J_D": 876.4494840517044, "W_D_1KI": 0.5363415871405472, "J_D_1KI": 0.0042511460253364455} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 128043, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.502545356750488, "TIME_S_1KI": 0.08202358080293722, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1345.7720102190972, "W": 103.43, "J_1KI": 10.510313021556017, "W_1KI": 0.8077755129136307, "W_D": 68.14325000000001, "J_D": 886.6409990850092, "W_D_1KI": 0.532190357926634, "J_D_1KI": 0.004156340900530557} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output index f5dabe4..1baca85 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.13353276252746582} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1170039176940918} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), - col_indices=tensor([10989, 5739, 28866, ..., 21823, 4005, 34886]), - values=tensor([0.4353, 0.4497, 0.0871, ..., 0.0925, 0.2903, 0.5435]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([20669, 48572, 15521, ..., 4942, 37440, 49163]), + values=tensor([0.4805, 0.0794, 0.3246, ..., 0.3038, 0.8605, 0.6038]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7562, 0.8922, 0.4564, ..., 0.1486, 0.4797, 0.4813]) +tensor([0.4235, 0.9189, 0.0697, ..., 0.8234, 0.9093, 0.0251]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,37 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.13353276252746582 seconds +Time: 0.1170039176940918 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '78632', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.544129848480225} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '89740', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.3589677810668945} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24999, 25000]), + col_indices=tensor([42797, 39277, 20964, ..., 31232, 43143, 42518]), + values=tensor([0.7162, 0.4091, 0.9127, ..., 0.7828, 0.7816, 0.8353]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.2017, 0.4349, 0.5577, ..., 0.2868, 0.8229, 0.7966]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 7.3589677810668945 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '128043', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.502545356750488} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 24999, 25000]), - col_indices=tensor([34114, 35224, 10296, ..., 13464, 985, 3770]), - values=tensor([0.2384, 0.3975, 0.4000, ..., 0.4541, 0.7785, 0.5313]), + col_indices=tensor([18076, 25567, 40242, ..., 19386, 42443, 43843]), + values=tensor([0.1613, 0.4932, 0.9378, ..., 0.7394, 0.5576, 0.1832]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.2311, 0.0634, 0.6873, ..., 0.2883, 0.1765, 0.0650]) +tensor([0.1994, 0.9899, 0.9038, ..., 0.7869, 0.4416, 0.9952]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 6.544129848480225 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '126164', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.496079683303833} +Time: 10.502545356750488 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 25000, 25000, 25000]), - col_indices=tensor([ 3707, 41195, 46820, ..., 24919, 16438, 24153]), - values=tensor([0.4077, 0.2091, 0.6369, ..., 0.9924, 0.1508, 0.5036]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 24999, 25000]), + col_indices=tensor([18076, 25567, 40242, ..., 19386, 42443, 43843]), + values=tensor([0.1613, 0.4932, 0.9378, ..., 0.7394, 0.5576, 0.1832]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.4831, 0.5861, 0.9166, ..., 0.7031, 0.1228, 0.1244]) +tensor([0.1994, 0.9899, 0.9038, ..., 0.7869, 0.4416, 0.9952]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,29 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.496079683303833 seconds +Time: 10.502545356750488 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 25000, 25000, 25000]), - col_indices=tensor([ 3707, 41195, 46820, ..., 24919, 16438, 24153]), - values=tensor([0.4077, 0.2091, 0.6369, ..., 0.9924, 0.1508, 0.5036]), - size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.4831, 0.5861, 0.9166, ..., 0.7031, 0.1228, 0.1244]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.496079683303833 seconds - -[41.91, 39.37, 39.94, 39.02, 39.81, 39.1, 40.39, 38.98, 39.08, 39.0] -[103.41] -12.952391624450684 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 126164, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.496079683303833, 'TIME_S_1KI': 0.08319393553869434, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1339.4068178844452, 'W': 103.41} -[41.91, 39.37, 39.94, 39.02, 39.81, 39.1, 40.39, 38.98, 39.08, 39.0, 39.67, 39.09, 39.31, 39.83, 39.31, 39.25, 39.43, 38.89, 44.16, 39.22] -714.86 -35.743 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 126164, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.496079683303833, 'TIME_S_1KI': 0.08319393553869434, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1339.4068178844452, 'W': 103.41, 'J_1KI': 10.616394675854009, 'W_1KI': 0.8196474430106845, 'W_D': 67.667, 'J_D': 876.4494840517044, 'W_D_1KI': 0.5363415871405472, 'J_D_1KI': 0.0042511460253364455} +[40.3, 39.43, 39.29, 39.36, 38.95, 39.04, 39.78, 38.84, 39.12, 39.2] +[103.43] +13.011428117752075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 128043, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.502545356750488, 'TIME_S_1KI': 0.08202358080293722, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1345.7720102190972, 'W': 103.43} +[40.3, 39.43, 39.29, 39.36, 38.95, 39.04, 39.78, 38.84, 39.12, 39.2, 40.2, 39.28, 39.13, 39.22, 38.89, 38.97, 38.85, 39.3, 39.03, 38.81] +705.7349999999999 +35.28675 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 128043, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.502545356750488, 'TIME_S_1KI': 0.08202358080293722, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1345.7720102190972, 'W': 103.43, 'J_1KI': 10.510313021556017, 'W_1KI': 0.8077755129136307, 'W_D': 68.14325000000001, 'J_D': 886.6409990850092, 'W_D_1KI': 0.532190357926634, 'J_D_1KI': 0.004156340900530557} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..4891577 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 435807, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, 
"TIME_S": 10.511547565460205, "TIME_S_1KI": 0.024119730902578906, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1232.545183532238, "W": 96.09, "J_1KI": 2.828190422669296, "W_1KI": 0.2204875093791518, "W_D": 60.76, "J_D": 779.3677318286896, "W_D_1KI": 0.1394195136838096, "J_D_1KI": 0.0003199111388385446} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..0251f49 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.040769100189208984} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2500, 2500]), + col_indices=tensor([4125, 1116, 4300, ..., 690, 2880, 3382]), + values=tensor([0.0653, 0.6541, 0.1575, ..., 0.5764, 0.0907, 0.6553]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7803, 0.2089, 0.7573, ..., 0.7596, 0.3125, 0.6078]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.040769100189208984 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '257547', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.205128908157349} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([1273, 2247, 4850, ..., 2520, 1394, 3793]), + values=tensor([0.8733, 0.7089, 0.0515, ..., 0.3445, 0.4099, 0.0495]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.0086, 0.7636, 0.4685, ..., 0.9955, 0.7657, 0.7966]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 6.205128908157349 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '435807', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.511547565460205} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([4068, 4690, 1058, ..., 2571, 4364, 3391]), + values=tensor([0.9209, 0.6933, 0.9201, ..., 0.0738, 0.0357, 0.7845]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3918, 0.7384, 0.9927, ..., 0.9998, 0.6009, 0.1634]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.511547565460205 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([4068, 4690, 1058, ..., 2571, 4364, 3391]), + values=tensor([0.9209, 0.6933, 0.9201, ..., 0.0738, 0.0357, 0.7845]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3918, 0.7384, 0.9927, ..., 0.9998, 0.6009, 0.1634]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.511547565460205 seconds + +[39.78, 39.82, 44.51, 38.6, 40.39, 38.56, 38.63, 38.62, 38.67, 38.55] +[96.09] +12.826987028121948 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 435807, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.511547565460205, 'TIME_S_1KI': 0.024119730902578906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1232.545183532238, 'W': 96.09} +[39.78, 39.82, 44.51, 38.6, 40.39, 38.56, 38.63, 38.62, 38.67, 38.55, 39.31, 38.89, 38.76, 38.73, 39.09, 38.53, 38.74, 39.13, 38.76, 38.7] +706.6000000000001 +35.330000000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 435807, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.511547565460205, 'TIME_S_1KI': 0.024119730902578906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1232.545183532238, 'W': 96.09, 'J_1KI': 2.828190422669296, 'W_1KI': 0.2204875093791518, 'W_D': 60.76, 'J_D': 779.3677318286896, 'W_D_1KI': 0.1394195136838096, 'J_D_1KI': 0.0003199111388385446} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..fe8abca --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 245735, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.373013734817505, "TIME_S_1KI": 0.042212194985726516, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1260.703432407379, "W": 98.11, "J_1KI": 5.130337283689255, "W_1KI": 0.39925122591409445, "W_D": 62.99425, "J_D": 809.4696483225822, "W_D_1KI": 0.2563503367448674, "J_D_1KI": 0.0010431983101506395} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..79cf966 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.05801510810852051} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 14, ..., 24987, 24992, 25000]), + col_indices=tensor([2155, 3530, 3567, ..., 2695, 4305, 4878]), + values=tensor([0.7077, 0.9384, 0.0254, ..., 0.2116, 0.4863, 0.3277]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5025, 0.8306, 0.5455, ..., 0.1180, 0.7485, 0.4884]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.05801510810852051 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '180987', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.7333667278289795} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24993, 24997, 25000]), + col_indices=tensor([ 162, 480, 815, ..., 2232, 2732, 2847]), + values=tensor([0.8302, 0.2791, 0.7518, ..., 0.7674, 0.4968, 0.3066]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5831, 0.9483, 0.7910, ..., 0.0226, 0.1378, 0.9053]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 7.7333667278289795 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '245735', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.373013734817505} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 13, ..., 24990, 24995, 25000]), + col_indices=tensor([1389, 1769, 1783, ..., 2323, 3077, 3881]), + values=tensor([0.3893, 0.4927, 0.3928, ..., 0.2440, 0.9871, 0.0384]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3455, 0.7497, 0.7321, ..., 0.5403, 0.0178, 0.6295]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.373013734817505 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 13, ..., 24990, 24995, 25000]), + col_indices=tensor([1389, 1769, 1783, ..., 2323, 3077, 3881]), + values=tensor([0.3893, 0.4927, 0.3928, ..., 0.2440, 0.9871, 0.0384]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3455, 0.7497, 0.7321, ..., 0.5403, 0.0178, 0.6295]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.373013734817505 seconds + +[40.07, 38.96, 38.85, 38.93, 38.85, 39.21, 39.52, 38.55, 38.74, 38.71] +[98.11] +12.849897384643555 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 245735, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.373013734817505, 'TIME_S_1KI': 0.042212194985726516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1260.703432407379, 'W': 98.11} +[40.07, 38.96, 38.85, 38.93, 38.85, 39.21, 39.52, 38.55, 38.74, 38.71, 40.05, 38.72, 38.65, 39.11, 38.9, 39.03, 39.06, 38.96, 38.91, 39.9] +702.3149999999999 +35.11575 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 245735, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.373013734817505, 'TIME_S_1KI': 0.042212194985726516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1260.703432407379, 'W': 98.11, 'J_1KI': 5.130337283689255, 'W_1KI': 0.39925122591409445, 'W_D': 62.99425, 'J_D': 809.4696483225822, 'W_D_1KI': 0.2563503367448674, 'J_D_1KI': 0.0010431983101506395} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..65befab --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 145666, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.058181524276733, "TIME_S_1KI": 0.06904961709854553, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1397.6345458984374, "W": 116.8, "J_1KI": 9.594789078428992, "W_1KI": 0.80183433333791, "W_D": 81.52975, "J_D": 975.5889993019105, "W_D_1KI": 0.5597033624867849, "J_D_1KI": 0.0038423747647823438} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..930f25a --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.10852384567260742} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 51, 93, ..., 249898, 249945, + 250000]), + col_indices=tensor([ 121, 263, 268, ..., 4347, 4657, 4780]), + values=tensor([0.9155, 0.4457, 0.5767, ..., 0.8561, 0.2482, 0.9078]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9909, 0.5337, 0.2877, ..., 0.9413, 0.4687, 0.7116]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.10852384567260742 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '96752', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 6.974123954772949} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 54, 108, ..., 249899, 249958, + 250000]), + col_indices=tensor([ 30, 44, 230, ..., 4553, 4620, 4987]), + values=tensor([0.7207, 0.9659, 0.8009, ..., 0.1897, 0.2795, 0.9074]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9275, 0.8053, 0.7107, ..., 0.1305, 0.9789, 0.9894]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 6.974123954772949 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '145666', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.058181524276733} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 49, 108, ..., 249901, 249948, + 250000]), + col_indices=tensor([ 207, 226, 430, ..., 4797, 4906, 4947]), + values=tensor([0.9242, 0.6665, 0.8223, ..., 0.0998, 0.8618, 0.4766]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6439, 0.4458, 0.8465, ..., 0.5021, 0.5940, 0.7614]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.058181524276733 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 49, 108, ..., 249901, 249948, + 250000]), + col_indices=tensor([ 207, 226, 430, ..., 4797, 4906, 4947]), + values=tensor([0.9242, 0.6665, 0.8223, ..., 0.0998, 0.8618, 0.4766]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6439, 0.4458, 0.8465, ..., 0.5021, 0.5940, 0.7614]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.058181524276733 seconds + +[39.46, 38.8, 39.74, 39.98, 38.95, 39.2, 39.78, 38.76, 39.24, 38.86] +[116.8] +11.966049194335938 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 145666, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.058181524276733, 'TIME_S_1KI': 0.06904961709854553, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1397.6345458984374, 'W': 116.8} +[39.46, 38.8, 39.74, 39.98, 38.95, 39.2, 39.78, 38.76, 39.24, 38.86, 39.39, 39.72, 39.13, 38.67, 39.47, 39.2, 39.27, 38.58, 38.78, 38.56] +705.405 +35.27025 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 145666, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.058181524276733, 'TIME_S_1KI': 0.06904961709854553, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1397.6345458984374, 'W': 116.8, 'J_1KI': 9.594789078428992, 'W_1KI': 0.80183433333791, 'W_D': 81.52975, 'J_D': 975.5889993019105, 'W_D_1KI': 0.5597033624867849, 'J_D_1KI': 0.0038423747647823438} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..d2380a6 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 91710, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.455259799957275, "TIME_S_1KI": 0.11400348707836959, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1752.8075734996796, "W": 134.53, "J_1KI": 19.112502164427866, "W_1KI": 1.466906553265729, "W_D": 98.01950000000001, "J_D": 1277.107871483326, "W_D_1KI": 1.0687983862174244, "J_D_1KI": 0.011654109543315062} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..16b0c97 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, 
"MATRIX_DENSITY": 0.05, "TIME_S": 0.15620112419128418} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 240, 508, ..., 1249498, + 1249743, 1250000]), + col_indices=tensor([ 1, 2, 46, ..., 4888, 4964, 4980]), + values=tensor([0.7368, 0.9867, 0.0616, ..., 0.4088, 0.7518, 0.0307]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2511, 0.5490, 0.8698, ..., 0.0135, 0.9603, 0.4779]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.15620112419128418 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '67221', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 7.696179628372192} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 260, 523, ..., 1249493, + 1249731, 1250000]), + col_indices=tensor([ 11, 36, 51, ..., 4933, 4983, 4999]), + values=tensor([0.1688, 0.6439, 0.5409, ..., 0.9889, 0.0264, 0.5294]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2628, 0.4260, 0.4558, ..., 0.6039, 0.8509, 0.7408]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 7.696179628372192 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '91710', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.455259799957275} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 271, 506, ..., 1249448, + 1249721, 1250000]), + col_indices=tensor([ 29, 30, 76, ..., 4981, 4997, 4999]), + values=tensor([0.0426, 0.5256, 0.4347, ..., 0.1903, 0.6901, 0.8658]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.0426, 0.2115, 0.6413, ..., 0.2013, 0.2155, 0.0145]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.455259799957275 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 271, 506, ..., 1249448, + 1249721, 1250000]), + col_indices=tensor([ 29, 30, 76, ..., 4981, 4997, 4999]), + values=tensor([0.0426, 0.5256, 0.4347, ..., 0.1903, 0.6901, 0.8658]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.0426, 0.2115, 0.6413, ..., 0.2013, 0.2155, 0.0145]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.455259799957275 seconds + +[40.22, 39.01, 39.13, 39.06, 38.89, 38.77, 38.89, 45.02, 39.49, 38.74] +[134.53] +13.029120445251465 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91710, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.455259799957275, 'TIME_S_1KI': 0.11400348707836959, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1752.8075734996796, 'W': 134.53} +[40.22, 39.01, 39.13, 39.06, 38.89, 38.77, 38.89, 45.02, 39.49, 38.74, 39.45, 38.91, 38.91, 38.77, 45.05, 51.64, 42.38, 38.87, 38.81, 38.81] +730.2099999999999 +36.51049999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91710, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.455259799957275, 'TIME_S_1KI': 0.11400348707836959, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1752.8075734996796, 'W': 134.53, 'J_1KI': 19.112502164427866, 'W_1KI': 1.466906553265729, 'W_D': 98.01950000000001, 'J_D': 1277.107871483326, 'W_D_1KI': 1.0687983862174244, 'J_D_1KI': 0.011654109543315062} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..074b9e9 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 53642, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.719228744506836, "TIME_S_1KI": 0.19982902845730652, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1911.0154657673836, "W": 139.47, "J_1KI": 35.62535822242615, "W_1KI": 2.600014913687036, "W_D": 104.0105, "J_D": 1425.1500258277654, "W_D_1KI": 1.9389750568584316, "J_D_1KI": 0.036146583961418885} diff --git 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..687e6a4 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.2534494400024414} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 505, 999, ..., 2498983, + 2499471, 2500000]), + col_indices=tensor([ 5, 18, 40, ..., 4969, 4978, 4986]), + values=tensor([0.5163, 0.4412, 0.3185, ..., 0.4202, 0.6886, 0.7408]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4458, 0.7168, 0.1041, ..., 0.9922, 0.1900, 0.6310]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.2534494400024414 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '41428', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.109162092208862} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 495, 993, ..., 2498983, + 2499503, 2500000]), + col_indices=tensor([ 3, 19, 21, ..., 4946, 4955, 4989]), + values=tensor([0.8083, 0.9559, 0.4619, ..., 0.5259, 0.1142, 0.6698]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.9615, 0.1263, 0.8854, ..., 0.2773, 0.4703, 0.0965]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 8.109162092208862 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '53642', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.719228744506836} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 514, 1060, ..., 2499048, + 2499512, 2500000]), + col_indices=tensor([ 10, 15, 21, ..., 4947, 4988, 4996]), + values=tensor([0.5424, 0.5712, 0.8006, ..., 0.9771, 0.7885, 0.2387]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8250, 0.9268, 0.6213, ..., 0.2000, 0.5207, 0.9721]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.719228744506836 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 514, 1060, ..., 2499048, + 2499512, 2500000]), + col_indices=tensor([ 10, 15, 21, ..., 4947, 4988, 4996]), + values=tensor([0.5424, 0.5712, 0.8006, ..., 0.9771, 0.7885, 0.2387]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8250, 0.9268, 0.6213, ..., 0.2000, 0.5207, 0.9721]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.719228744506836 seconds + +[40.33, 39.84, 39.58, 39.84, 39.14, 39.05, 39.58, 39.29, 39.44, 39.24] +[139.47] +13.701982259750366 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 53642, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.719228744506836, 'TIME_S_1KI': 0.19982902845730652, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1911.0154657673836, 'W': 139.47} +[40.33, 39.84, 39.58, 39.84, 39.14, 39.05, 39.58, 39.29, 39.44, 39.24, 39.75, 39.09, 39.67, 39.12, 39.04, 39.58, 39.1, 39.15, 39.53, 38.98] +709.19 +35.459500000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 53642, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.719228744506836, 'TIME_S_1KI': 0.19982902845730652, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1911.0154657673836, 'W': 139.47, 'J_1KI': 35.62535822242615, 'W_1KI': 2.600014913687036, 'W_D': 104.0105, 'J_D': 1425.1500258277654, 'W_D_1KI': 1.9389750568584316, 'J_D_1KI': 0.036146583961418885} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..f4429df --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 491380, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.363765478134155, "TIME_S_1KI": 0.021091142248634776, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1327.781383190155, "W": 95.12, "J_1KI": 2.7021477943549903, "W_1KI": 0.1935772721722496, "W_D": 60.282000000000004, "J_D": 841.4772638926506, "W_D_1KI": 
0.12267898571370427, "J_D_1KI": 0.00024966214683891136} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..5c32150 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,464 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0375218391418457} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4395, 1896, 1016, 4725, 4287, 4964, 4709, 2475, 4723, + 2193, 4334, 4011, 1534, 947, 2980, 1276, 2745, 2145, + 4595, 3295, 1907, 4436, 575, 2869, 2437, 1774, 103, + 4181, 1510, 1361, 4237, 4144, 4620, 1378, 4923, 2023, + 170, 2835, 1311, 2663, 3014, 105, 2833, 4415, 2179, + 2930, 693, 1558, 1071, 3383, 2339, 3436, 478, 4648, + 3106, 1411, 4257, 307, 1671, 1884, 1213, 4984, 642, + 1762, 3957, 2642, 3601, 1788, 779, 952, 1165, 4886, + 1883, 1290, 3845, 617, 3725, 3513, 4081, 2223, 1340, + 3232, 3261, 4162, 1911, 3991, 2182, 2166, 4202, 2629, + 1539, 1110, 990, 1798, 3362, 92, 4378, 3447, 4318, + 1039, 2930, 1879, 4375, 2295, 3990, 746, 3339, 2924, + 1503, 2112, 2677, 3879, 2287, 1293, 3194, 3630, 2849, + 3363, 1715, 457, 1006, 888, 2409, 2177, 4389, 4129, + 1812, 2617, 4717, 2316, 4949, 4158, 4435, 1917, 1201, + 1815, 715, 270, 923, 1913, 3452, 2985, 4782, 1099, + 4541, 1002, 2896, 4712, 4267, 2282, 628, 3973, 2938, + 376, 3252, 94, 2656, 4853, 4987, 1689, 1656, 463, + 3165, 992, 2823, 3447, 1273, 2259, 3674, 3345, 2191, + 1553, 3931, 925, 4111, 4050, 2652, 4860, 4434, 4407, + 4679, 4167, 4708, 2520, 3526, 2887, 3132, 3816, 2503, + 1957, 3455, 1933, 2402, 1540, 2844, 1178, 2305, 1831, + 1888, 1548, 3851, 4681, 615, 1793, 720, 2902, 503, + 2399, 4452, 2482, 1672, 109, 4558, 522, 4488, 4193, + 4882, 4297, 3385, 3297, 4242, 2939, 945, 273, 1189, + 1168, 4866, 495, 4965, 2390, 1391, 2738, 4804, 1124, + 3476, 3768, 384, 2163, 1378, 1422, 3827, 12, 4549, + 4524, 1374, 4468, 1024, 3152, 985, 3013]), + values=tensor([0.3589, 0.1660, 0.5969, 0.5688, 0.7752, 0.6324, 0.0921, + 0.8083, 0.2140, 0.4448, 0.7196, 0.7942, 0.0476, 0.7765, + 0.8012, 0.3506, 0.5836, 0.4105, 0.9051, 0.1137, 0.9336, + 0.6799, 0.2082, 0.3357, 0.2380, 0.0294, 0.3136, 0.6271, + 0.0480, 0.8189, 0.3762, 0.4307, 0.7550, 0.2975, 0.8129, + 0.6595, 0.2962, 0.6547, 0.2906, 0.5665, 0.2166, 0.0083, + 0.8507, 0.4177, 0.3111, 0.7802, 0.8212, 0.9638, 0.3557, + 0.0980, 0.2482, 0.5366, 0.7901, 0.2480, 0.2830, 0.7633, + 0.5347, 0.7196, 0.5079, 0.6330, 0.0116, 0.5729, 0.0163, + 0.3271, 0.1166, 0.7494, 0.8340, 0.1356, 0.0263, 0.4976, + 0.5250, 0.2124, 0.2063, 0.9876, 0.3997, 0.7903, 0.7881, + 0.3414, 0.4348, 0.0748, 0.0069, 0.4733, 0.7388, 0.7424, + 0.3306, 0.5022, 0.7748, 0.6669, 0.3713, 0.6478, 
0.6388, + 0.2317, 0.6064, 0.6536, 0.7202, 0.1361, 0.2493, 0.4139, + 0.3712, 0.5295, 0.2695, 0.1631, 0.6452, 0.1880, 0.6974, + 0.2683, 0.9017, 0.1561, 0.7046, 0.4239, 0.3874, 0.9700, + 0.0969, 0.1337, 0.7109, 0.6092, 0.5278, 0.2182, 0.9419, + 0.1230, 0.0570, 0.8053, 0.7324, 0.3831, 0.6385, 0.6323, + 0.1642, 0.2573, 0.2933, 0.0240, 0.6775, 0.2145, 0.7747, + 0.7540, 0.1746, 0.4005, 0.6380, 0.0383, 0.0075, 0.4765, + 0.6191, 0.5223, 0.3245, 0.6164, 0.9290, 0.7803, 0.7819, + 0.8932, 0.7100, 0.6960, 0.3784, 0.1869, 0.3217, 0.0764, + 0.2134, 0.0336, 0.4501, 0.8327, 0.9741, 0.2640, 0.8758, + 0.2835, 0.3411, 0.2947, 0.0888, 0.7701, 0.5229, 0.9266, + 0.0848, 0.6607, 0.1111, 0.4010, 0.5304, 0.7457, 0.6466, + 0.5183, 0.6236, 0.8001, 0.5880, 0.2006, 0.1409, 0.4395, + 0.5142, 0.7264, 0.5640, 0.9227, 0.8507, 0.0543, 0.7639, + 0.4626, 0.9840, 0.9821, 0.7239, 0.8139, 0.7906, 0.7453, + 0.9443, 0.9108, 0.4282, 0.6493, 0.3251, 0.2113, 0.5069, + 0.2668, 0.6773, 0.2164, 0.4803, 0.1428, 0.5884, 0.9624, + 0.2800, 0.1414, 0.8042, 0.8031, 0.1028, 0.1173, 0.0795, + 0.0760, 0.4125, 0.2705, 0.8781, 0.8291, 0.9000, 0.5426, + 0.0626, 0.4498, 0.1347, 0.0120, 0.0110, 0.1303, 0.5281, + 0.8963, 0.0447, 0.5862, 0.0936, 0.4003, 0.0188, 0.9347, + 0.9400, 0.0108, 0.8998, 0.5855, 0.1393, 0.5266, 0.4851, + 0.4774, 0.1186, 0.4945, 0.1561, 0.6695]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1727, 0.0592, 0.5429, ..., 0.7822, 0.3152, 0.8983]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.0375218391418457 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '279837', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.313635349273682} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
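
Every .output file in this diff has the same shape: a JSON header from the probe run, the beta-state warning pointing at spmv.py:75 (`matrix = matrix.to_sparse_csr().type(torch.float32)`), a dump of the CSR tensor, a random dense vector, and a human-readable summary with TIME_S. The committed NNZ values are exactly size² × density (e.g. 5000² × 1e-05 = 250), so the synthetic path presumably draws that many random coordinates. A minimal sketch of what `spmv.py synthetic csr` appears to measure; spmv.py itself is not in this diff, so everything except the warned conversion line is an assumption:

    import time
    import torch

    def spmv_synthetic(size, density, iterations):
        # Hypothetical sketch of spmv.py's synthetic path; only the CSR
        # conversion line below is confirmed by the committed outputs.
        nnz = int(size * size * density)            # e.g. 5000**2 * 1e-05 = 250
        indices = torch.randint(0, size, (2, nnz))  # random (row, col) pairs;
                                                    # duplicates ignored for brevity
        matrix = torch.sparse_coo_tensor(indices, torch.rand(nnz), (size, size))
        matrix = matrix.to_sparse_csr().type(torch.float32)  # spmv.py:75
        print(matrix)                               # the crow_indices/... dump above
        vector = torch.rand(size)
        print(vector)                               # the dense operand in the logs
        start = time.time()
        for _ in range(iterations):
            matrix @ vector                         # CSR sparse @ dense vector
        return time.time() - start                  # reported as TIME_S
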
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1000, 776, 2038, 1918, 4703, 1006, 4783, 345, 4138, + 3890, 4809, 2179, 2654, 39, 3277, 4397, 222, 2644, + 2751, 2925, 4735, 3220, 3118, 2167, 634, 4745, 1720, + 1787, 1820, 1926, 473, 4992, 3200, 1675, 2855, 1802, + 1163, 3602, 1443, 3413, 1710, 3667, 710, 2344, 517, + 391, 713, 190, 1392, 2043, 4585, 625, 4376, 675, + 2895, 3693, 1220, 3427, 1249, 2791, 1410, 4832, 399, + 4671, 1556, 854, 3021, 1498, 4986, 3565, 408, 836, + 665, 2782, 351, 4429, 75, 2826, 2951, 2393, 4532, + 3245, 2288, 2902, 2022, 286, 18, 633, 1739, 1345, + 793, 3242, 1720, 741, 64, 3142, 2934, 4827, 3950, + 3991, 4310, 1432, 1925, 3941, 4723, 3084, 4330, 746, + 137, 294, 384, 3827, 745, 1424, 1461, 4954, 1830, + 44, 2741, 256, 4697, 1693, 4846, 724, 3317, 1459, + 3982, 3106, 1079, 1976, 1040, 1694, 3341, 1005, 3602, + 4265, 468, 3692, 3883, 2013, 2499, 2240, 197, 333, + 4890, 1103, 1367, 971, 9, 2617, 23, 2576, 3485, + 3475, 1094, 2503, 3587, 3228, 2141, 4874, 4780, 4139, + 1118, 4510, 2959, 1803, 1379, 4711, 1070, 1400, 798, + 3550, 1131, 4486, 1422, 239, 651, 4998, 1567, 1515, + 4080, 2218, 4949, 747, 3327, 4701, 733, 1212, 419, + 63, 3878, 2875, 4861, 4483, 644, 592, 3560, 3073, + 4937, 789, 208, 3509, 3077, 4039, 4563, 1839, 362, + 2824, 2672, 4373, 1492, 1152, 3845, 328, 2405, 4091, + 2931, 2541, 2530, 3217, 4233, 1852, 2606, 3892, 380, + 2119, 1221, 1290, 592, 3077, 4909, 282, 3215, 863, + 1452, 3214, 1592, 795, 193, 4254, 3986, 1847, 2461, + 4353, 1361, 3013, 1482, 4277, 3046, 277]), + values=tensor([0.1993, 0.0335, 0.2936, 0.2778, 0.6825, 0.6252, 0.3746, + 0.6011, 0.3211, 0.0488, 0.6153, 0.4477, 0.3116, 0.5339, + 0.8158, 0.9445, 0.2638, 0.2848, 0.0424, 0.7741, 0.0547, + 0.0033, 0.5605, 0.2034, 0.9731, 0.4334, 0.6773, 0.7018, + 0.1534, 0.3665, 0.8519, 0.3002, 0.6885, 0.4688, 0.2572, + 0.6610, 0.5022, 0.8309, 0.6908, 0.4905, 0.2911, 0.9203, + 0.1018, 0.0930, 0.0540, 0.4357, 0.3509, 0.7870, 0.0358, + 0.8075, 0.3342, 0.2290, 0.0496, 0.3593, 0.2995, 0.8746, + 0.4914, 0.4993, 0.3891, 0.1546, 0.8356, 0.6696, 0.4824, + 0.2231, 0.1034, 0.1057, 0.9353, 0.7565, 0.0205, 0.6134, + 0.2384, 0.3674, 0.3962, 0.9296, 0.6846, 0.4976, 0.1741, + 0.5769, 0.7161, 0.8852, 0.4021, 0.6679, 0.8123, 0.7585, + 0.4922, 0.4006, 0.7864, 0.5428, 0.2744, 0.6398, 0.5713, + 0.5059, 0.5864, 0.9374, 0.2614, 0.5042, 0.9384, 0.6001, + 0.6641, 0.9381, 0.7652, 0.8431, 0.3189, 0.3689, 0.1936, + 0.2802, 0.9156, 0.2338, 0.8578, 0.8112, 0.0258, 0.5958, + 0.3193, 0.8350, 0.4442, 0.6220, 0.0680, 0.3877, 0.0287, + 0.0452, 0.0470, 0.9809, 0.1556, 0.9905, 0.7569, 0.6043, + 0.3024, 0.2231, 0.5911, 0.7279, 0.1875, 0.4016, 0.8539, + 0.8317, 0.9058, 0.9818, 0.9295, 0.7640, 0.2727, 0.6203, + 0.1544, 0.4062, 0.9584, 0.7373, 0.5273, 0.9229, 0.0078, + 0.4057, 0.6887, 0.2597, 0.9070, 0.0464, 0.2160, 0.1271, + 0.9922, 0.5976, 0.8143, 0.2235, 0.4892, 0.2001, 0.4528, + 0.1225, 0.4565, 0.8621, 0.9634, 0.9838, 0.1175, 0.1191, + 0.3323, 0.5146, 0.3230, 0.2640, 0.7803, 0.1440, 0.3733, + 0.5784, 0.4250, 0.8408, 0.1600, 0.2238, 0.8622, 0.6312, + 0.1334, 0.8781, 0.5698, 0.6408, 0.9350, 0.2941, 0.4688, + 0.7220, 0.4646, 0.9861, 0.0500, 0.4193, 0.0556, 0.5709, + 0.7646, 0.4955, 0.8941, 0.2442, 0.8406, 0.6412, 0.9435, + 0.4433, 0.6774, 0.7909, 0.0668, 0.2898, 0.6302, 0.4354, + 0.5554, 0.1307, 0.3038, 0.5817, 0.3553, 0.0957, 0.1830, + 0.0409, 0.7005, 0.4236, 0.5500, 0.1534, 0.6689, 0.3917, + 0.6300, 0.3524, 0.5544, 0.7816, 0.9821, 0.6097, 0.7965, + 
0.4709, 0.7898, 0.8168, 0.4400, 0.9718, 0.6481, 0.1531, + 0.2683, 0.6283, 0.0070, 0.5412, 0.3329, 0.0354, 0.8301, + 0.9730, 0.0239, 0.4507, 0.6650, 0.1805]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.7126, 0.1651, 0.2523, ..., 0.5242, 0.8574, 0.9519]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 6.313635349273682 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '465387', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.944559097290039} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3699, 186, 998, 2786, 4646, 2125, 3758, 4753, 3164, + 2363, 875, 4485, 3467, 1146, 3713, 40, 3541, 4449, + 4355, 2987, 2483, 619, 973, 1036, 3097, 3292, 1211, + 2063, 4539, 4771, 731, 3646, 2815, 768, 3249, 2575, + 2960, 363, 3877, 2937, 63, 415, 487, 1370, 4864, + 2020, 2769, 2052, 1779, 3036, 1442, 3834, 769, 3436, + 2189, 3115, 684, 1261, 3554, 1491, 3600, 1655, 2428, + 3514, 792, 3919, 634, 3347, 2785, 3599, 785, 1340, + 4938, 4142, 605, 2442, 1836, 454, 2921, 2205, 4312, + 181, 1216, 3787, 581, 4008, 4443, 54, 218, 289, + 3888, 1643, 1803, 2145, 3434, 2861, 1581, 1923, 2613, + 1349, 2463, 1604, 2867, 1095, 4657, 931, 3380, 929, + 4999, 4668, 111, 3182, 838, 3317, 3428, 2012, 269, + 2099, 3052, 2433, 4600, 3901, 797, 3047, 2694, 392, + 612, 4059, 890, 2451, 1440, 3830, 4505, 1010, 683, + 4379, 1969, 1059, 4043, 1700, 4918, 4169, 4943, 4644, + 344, 3773, 2125, 3043, 2084, 4564, 3622, 4125, 3605, + 3620, 3969, 1469, 3232, 2350, 1746, 3483, 4665, 442, + 2281, 432, 3712, 4513, 1703, 4987, 1609, 4799, 4974, + 2930, 777, 1513, 2040, 3501, 924, 1312, 2761, 948, + 3882, 1800, 3270, 2810, 2360, 431, 325, 629, 2700, + 2385, 3741, 1991, 4920, 4732, 1712, 3784, 2538, 4236, + 4704, 1653, 472, 3253, 3463, 2914, 2140, 436, 935, + 765, 4469, 3079, 4283, 3904, 4286, 3503, 727, 4200, + 1701, 2666, 1961, 3779, 4941, 2916, 3776, 4130, 4512, + 1476, 2724, 2096, 1261, 329, 2574, 2829, 4425, 2766, + 1392, 2849, 4694, 1310, 3819, 2271, 220, 1555, 4415, + 4380, 4811, 1487, 4371, 1280, 1276, 2851]), + values=tensor([0.3089, 0.4336, 0.4888, 0.1926, 0.0728, 0.0243, 0.5274, + 0.6630, 0.9150, 0.7137, 0.4027, 0.4542, 0.9097, 0.1648, + 0.5277, 0.5028, 0.4187, 0.4809, 0.9495, 0.9227, 0.8070, + 0.4872, 0.3446, 0.8684, 0.3301, 0.9325, 0.3317, 0.0577, + 0.4077, 0.7212, 0.2245, 0.3196, 0.4084, 0.0026, 0.5069, + 0.0203, 0.9024, 0.9005, 0.2265, 0.0366, 0.5914, 0.1735, + 0.1170, 0.5798, 0.1354, 0.6739, 0.4242, 0.7100, 0.8828, + 0.2350, 0.1061, 0.7739, 0.9333, 0.1778, 0.6243, 0.7262, + 0.1337, 0.7381, 0.8993, 0.7142, 0.5462, 0.6796, 0.8532, + 0.3021, 0.1257, 0.1108, 0.2909, 0.1187, 0.8439, 0.5066, + 0.4898, 0.1147, 0.6201, 0.7106, 0.4508, 0.8557, 0.4904, + 0.5557, 0.3419, 0.5877, 0.9547, 0.2594, 0.1852, 0.0350, + 0.3573, 0.0073, 0.2921, 0.3868, 
0.0717, 0.2638, 0.7715, + 0.2654, 0.7597, 0.8902, 0.4843, 0.0265, 0.2605, 0.7290, + 0.5883, 0.0284, 0.5260, 0.4294, 0.5088, 0.0923, 0.3560, + 0.9787, 0.3363, 0.6477, 0.5162, 0.2371, 0.5050, 0.3174, + 0.6755, 0.9371, 0.4029, 0.6291, 0.5378, 0.6016, 0.3741, + 0.4575, 0.7950, 0.1548, 0.4512, 0.4784, 0.3947, 0.6917, + 0.4337, 0.8695, 0.5511, 0.7730, 0.3604, 0.8313, 0.8321, + 0.1678, 0.2050, 0.7939, 0.9473, 0.7778, 0.3518, 0.5993, + 0.4048, 0.8949, 0.5428, 0.1845, 0.0665, 0.1550, 0.8858, + 0.8184, 0.3209, 0.1943, 0.3738, 0.7342, 0.8776, 0.4150, + 0.0843, 0.7937, 0.3737, 0.5068, 0.0092, 0.7933, 0.9316, + 0.9604, 0.9872, 0.9223, 0.4179, 0.0277, 0.0332, 0.3930, + 0.4059, 0.1792, 0.0113, 0.6697, 0.8110, 0.8809, 0.1653, + 0.5665, 0.2395, 0.2295, 0.0506, 0.8476, 0.6881, 0.7949, + 0.4503, 0.4586, 0.0727, 0.7405, 0.5349, 0.7008, 0.6280, + 0.8345, 0.3285, 0.7596, 0.7892, 0.6309, 0.7345, 0.5322, + 0.7826, 0.1455, 0.8185, 0.8804, 0.2134, 0.6699, 0.6927, + 0.7560, 0.1842, 0.8768, 0.4998, 0.8685, 0.7312, 0.6282, + 0.6567, 0.7052, 0.6029, 0.4550, 0.8792, 0.8789, 0.0886, + 0.4430, 0.3115, 0.8372, 0.3892, 0.9008, 0.8514, 0.6428, + 0.8764, 0.6919, 0.7104, 0.8790, 0.0593, 0.9565, 0.4781, + 0.3394, 0.5834, 0.8882, 0.5458, 0.1550, 0.9061, 0.0203, + 0.0355, 0.9846, 0.3746, 0.1614, 0.6948, 0.0117, 0.0137, + 0.0383, 0.7353, 0.3583, 0.0622, 0.0459]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.5720, 0.9223, 0.3340, ..., 0.6697, 0.2837, 0.3607]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 9.944559097290039 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '491380', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.363765478134155} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
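
The iteration counts recorded in each .output file follow a visible calibration pattern: a 1000-iteration probe, then one or more rescaled runs until TIME_S clears the 10 s BASELINE_TIME_S. The counts are consistent with scaling toward a 10.5 s target, e.g. in this file 1000 × 10.5 / 0.0375218 ≈ 279837, then 279837 × 10.5 / 6.31364 ≈ 465387, then 465387 × 10.5 / 9.94456 ≈ 491380. A sketch of that loop, inferred purely from the logged numbers (the driving script is not shown in this diff):

    def calibrate(run, start=1000, floor=10.0, target=10.5):
        # 'run' launches spmv.py with the given iteration count and returns
        # the measured TIME_S; rescale until the floor is cleared. Inferred
        # from the logged sequence 1000 -> 279837 -> 465387 -> 491380.
        iterations, elapsed = start, run(start)
        while elapsed < floor:
            iterations = int(iterations * target / elapsed)
            elapsed = run(iterations)
        return iterations, elapsed   # final pair lands in the .json file
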
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1815, 573, 313, 4753, 4998, 3650, 708, 3756, 4632, + 3171, 1063, 2555, 1629, 3246, 3070, 2176, 3197, 2451, + 4183, 1276, 3956, 2941, 590, 310, 901, 582, 3400, + 3816, 854, 431, 4240, 4381, 4138, 3465, 1098, 4568, + 1899, 575, 4191, 2652, 1753, 2563, 2759, 4705, 3778, + 3664, 3914, 746, 477, 1085, 3621, 200, 1606, 1735, + 567, 4327, 4532, 959, 1950, 219, 1019, 4655, 3231, + 739, 2806, 3755, 920, 3178, 1203, 2773, 3742, 4216, + 3040, 3288, 1862, 3988, 3055, 2380, 386, 4811, 4992, + 2551, 454, 3476, 3586, 1425, 4793, 4634, 4563, 197, + 1634, 1276, 3200, 3036, 1449, 923, 3741, 1238, 917, + 13, 4497, 485, 2520, 1891, 1907, 2355, 3849, 1705, + 4617, 3918, 387, 152, 370, 3166, 1980, 3215, 2459, + 4636, 960, 2987, 498, 3413, 4946, 1982, 2382, 4484, + 67, 2842, 3291, 3435, 4345, 3653, 4720, 2468, 2052, + 1025, 1841, 1304, 2057, 4424, 4112, 134, 4127, 448, + 2737, 3483, 1455, 2363, 189, 1811, 740, 3821, 2568, + 4923, 4229, 447, 1138, 4148, 2122, 232, 3305, 3147, + 1717, 408, 644, 2055, 527, 3062, 248, 4109, 399, + 1356, 4770, 2528, 2684, 4997, 3795, 4694, 440, 3426, + 1710, 4340, 1612, 56, 646, 771, 1729, 765, 1920, + 4681, 3827, 3045, 4987, 598, 406, 2175, 1659, 4617, + 1246, 2976, 4027, 4995, 1783, 4600, 3838, 4759, 1930, + 3732, 234, 3852, 2906, 2962, 686, 832, 3809, 994, + 87, 19, 2535, 4315, 3169, 3549, 2170, 3920, 3910, + 2128, 3451, 3492, 42, 369, 863, 4827, 2245, 672, + 3029, 4444, 3612, 4409, 2915, 1931, 518, 3028, 4272, + 2556, 3052, 1905, 3640, 2925, 2354, 3707]), + values=tensor([1.9637e-01, 9.6917e-01, 6.9012e-01, 6.5144e-02, + 6.9969e-01, 6.0735e-01, 9.8413e-01, 5.5329e-01, + 4.9977e-01, 8.2849e-02, 6.0922e-01, 9.8307e-01, + 7.2683e-01, 6.2751e-01, 2.5140e-01, 6.5370e-01, + 9.8048e-01, 8.3008e-01, 9.4034e-01, 5.6135e-01, + 4.5053e-04, 8.4765e-01, 6.7162e-01, 6.6604e-01, + 7.6374e-01, 3.7730e-01, 7.9733e-01, 5.1905e-01, + 1.1698e-01, 6.2411e-01, 4.1882e-01, 9.2515e-01, + 7.1296e-01, 7.6621e-01, 9.1292e-01, 2.3384e-01, + 9.5049e-01, 2.9472e-01, 4.8881e-01, 7.8866e-01, + 3.0122e-01, 3.0501e-01, 9.5326e-02, 6.3170e-01, + 1.3931e-01, 8.2970e-01, 2.2371e-01, 7.9744e-01, + 4.4607e-01, 1.5447e-02, 1.0137e-01, 3.8368e-01, + 8.2513e-01, 8.9986e-01, 2.3061e-01, 9.8290e-01, + 4.3469e-01, 7.3495e-01, 1.5216e-01, 3.9507e-01, + 7.1334e-01, 7.7117e-01, 9.9550e-01, 9.2278e-01, + 3.0890e-01, 6.6914e-01, 1.2145e-01, 9.1632e-01, + 5.0784e-01, 6.2243e-01, 6.5077e-01, 6.2687e-01, + 2.0114e-01, 7.5097e-01, 2.0777e-01, 4.2757e-01, + 2.2520e-01, 5.5414e-01, 9.1256e-01, 1.3031e-01, + 1.5351e-01, 4.1244e-01, 2.4735e-01, 9.5465e-01, + 3.7976e-01, 3.1882e-01, 2.8598e-02, 8.3393e-01, + 7.4047e-01, 7.3298e-01, 9.7843e-01, 4.0729e-01, + 9.2998e-02, 4.3465e-01, 3.2636e-01, 9.5106e-02, + 4.8367e-02, 3.1339e-01, 4.7275e-01, 6.9317e-01, + 6.7922e-01, 7.2355e-01, 6.1366e-01, 7.6219e-01, + 2.1995e-01, 3.9216e-01, 8.5252e-01, 7.1761e-01, + 4.5198e-01, 9.8165e-01, 7.6941e-01, 8.2823e-01, + 7.6982e-01, 4.3963e-01, 2.2626e-01, 2.9003e-01, + 7.3718e-01, 8.0941e-01, 4.5213e-01, 1.9323e-01, + 3.6014e-01, 6.7950e-02, 2.6777e-01, 7.5770e-01, + 8.8988e-01, 1.1815e-01, 1.1244e-01, 9.2625e-01, + 7.6156e-01, 9.7142e-01, 2.3564e-01, 3.8882e-01, + 5.9567e-01, 4.8258e-01, 5.5462e-01, 2.7503e-01, + 2.0411e-01, 3.1168e-01, 7.6951e-01, 7.2732e-01, + 4.6023e-02, 4.7740e-01, 9.9557e-01, 7.3789e-02, + 6.2383e-02, 3.5543e-01, 1.8242e-01, 3.6846e-01, + 5.3628e-02, 5.3874e-01, 3.0038e-01, 9.6174e-01, + 
9.6554e-01, 4.7430e-01, 2.2738e-01, 8.6557e-01, + 5.4122e-02, 8.5019e-01, 5.0852e-01, 5.3410e-01, + 1.7285e-01, 5.4149e-01, 8.0869e-01, 6.5103e-01, + 2.7217e-01, 7.0732e-01, 5.5532e-01, 9.9150e-01, + 7.5543e-01, 2.6834e-01, 2.8447e-01, 3.5912e-01, + 4.5601e-01, 7.0765e-01, 6.6949e-01, 5.9725e-01, + 4.8923e-01, 9.9235e-01, 7.6412e-02, 4.1164e-02, + 4.3938e-01, 9.1861e-01, 8.8739e-01, 9.4972e-01, + 4.9408e-01, 4.9347e-01, 3.4149e-01, 5.0322e-01, + 3.1901e-02, 5.2875e-01, 3.3499e-01, 9.5821e-01, + 5.2956e-01, 4.7216e-01, 2.0353e-01, 3.0726e-02, + 5.1848e-01, 2.6131e-01, 8.5289e-02, 4.9542e-01, + 1.5835e-01, 6.7945e-01, 7.8119e-01, 3.4856e-01, + 7.3888e-01, 4.3503e-01, 4.8394e-01, 1.0914e-01, + 5.9027e-01, 7.1288e-01, 9.8329e-01, 5.5542e-02, + 1.2536e-01, 1.9606e-01, 5.4455e-01, 4.3811e-01, + 5.8744e-01, 3.2588e-01, 6.3981e-02, 1.1337e-01, + 5.4324e-01, 8.4644e-01, 5.6165e-02, 5.0125e-01, + 1.5973e-01, 1.8614e-01, 7.8747e-01, 9.1964e-01, + 9.1086e-01, 5.6162e-01, 9.8390e-01, 1.9761e-01, + 4.5863e-01, 7.9353e-01, 3.8658e-02, 1.4135e-01, + 8.1843e-01, 3.0910e-01, 1.5630e-01, 6.8785e-01, + 4.2323e-01, 9.6230e-02, 7.4216e-01, 2.9855e-02, + 3.1890e-01, 2.8569e-01, 1.1579e-01, 7.3771e-01, + 8.3701e-01, 7.5848e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.7970, 0.8043, 0.6125, ..., 0.7108, 0.2175, 0.0136]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.363765478134155 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1815, 573, 313, 4753, 4998, 3650, 708, 3756, 4632, + 3171, 1063, 2555, 1629, 3246, 3070, 2176, 3197, 2451, + 4183, 1276, 3956, 2941, 590, 310, 901, 582, 3400, + 3816, 854, 431, 4240, 4381, 4138, 3465, 1098, 4568, + 1899, 575, 4191, 2652, 1753, 2563, 2759, 4705, 3778, + 3664, 3914, 746, 477, 1085, 3621, 200, 1606, 1735, + 567, 4327, 4532, 959, 1950, 219, 1019, 4655, 3231, + 739, 2806, 3755, 920, 3178, 1203, 2773, 3742, 4216, + 3040, 3288, 1862, 3988, 3055, 2380, 386, 4811, 4992, + 2551, 454, 3476, 3586, 1425, 4793, 4634, 4563, 197, + 1634, 1276, 3200, 3036, 1449, 923, 3741, 1238, 917, + 13, 4497, 485, 2520, 1891, 1907, 2355, 3849, 1705, + 4617, 3918, 387, 152, 370, 3166, 1980, 3215, 2459, + 4636, 960, 2987, 498, 3413, 4946, 1982, 2382, 4484, + 67, 2842, 3291, 3435, 4345, 3653, 4720, 2468, 2052, + 1025, 1841, 1304, 2057, 4424, 4112, 134, 4127, 448, + 2737, 3483, 1455, 2363, 189, 1811, 740, 3821, 2568, + 4923, 4229, 447, 1138, 4148, 2122, 232, 3305, 3147, + 1717, 408, 644, 2055, 527, 3062, 248, 4109, 399, + 1356, 4770, 2528, 2684, 4997, 3795, 4694, 440, 3426, + 1710, 4340, 1612, 56, 646, 771, 1729, 765, 1920, + 4681, 3827, 3045, 4987, 598, 406, 2175, 1659, 4617, + 1246, 2976, 4027, 4995, 1783, 4600, 3838, 4759, 1930, + 3732, 234, 3852, 2906, 2962, 686, 832, 3809, 994, + 87, 19, 2535, 4315, 3169, 3549, 2170, 3920, 3910, + 2128, 3451, 3492, 42, 369, 863, 4827, 2245, 672, + 3029, 4444, 3612, 4409, 2915, 1931, 518, 3028, 4272, + 2556, 3052, 1905, 3640, 2925, 2354, 3707]), + values=tensor([1.9637e-01, 9.6917e-01, 6.9012e-01, 6.5144e-02, + 6.9969e-01, 6.0735e-01, 9.8413e-01, 5.5329e-01, + 4.9977e-01, 8.2849e-02, 6.0922e-01, 9.8307e-01, + 7.2683e-01, 6.2751e-01, 2.5140e-01, 6.5370e-01, + 9.8048e-01, 8.3008e-01, 9.4034e-01, 5.6135e-01, + 4.5053e-04, 8.4765e-01, 6.7162e-01, 6.6604e-01, + 7.6374e-01, 3.7730e-01, 7.9733e-01, 5.1905e-01, + 1.1698e-01, 6.2411e-01, 4.1882e-01, 9.2515e-01, + 7.1296e-01, 7.6621e-01, 9.1292e-01, 2.3384e-01, + 9.5049e-01, 2.9472e-01, 4.8881e-01, 7.8866e-01, + 3.0122e-01, 3.0501e-01, 9.5326e-02, 6.3170e-01, + 1.3931e-01, 8.2970e-01, 2.2371e-01, 7.9744e-01, + 4.4607e-01, 1.5447e-02, 1.0137e-01, 3.8368e-01, + 8.2513e-01, 8.9986e-01, 2.3061e-01, 9.8290e-01, + 4.3469e-01, 7.3495e-01, 1.5216e-01, 3.9507e-01, + 7.1334e-01, 7.7117e-01, 9.9550e-01, 9.2278e-01, + 3.0890e-01, 6.6914e-01, 1.2145e-01, 9.1632e-01, + 5.0784e-01, 6.2243e-01, 6.5077e-01, 6.2687e-01, + 2.0114e-01, 7.5097e-01, 2.0777e-01, 4.2757e-01, + 2.2520e-01, 5.5414e-01, 9.1256e-01, 1.3031e-01, + 1.5351e-01, 4.1244e-01, 2.4735e-01, 9.5465e-01, + 3.7976e-01, 3.1882e-01, 2.8598e-02, 8.3393e-01, + 7.4047e-01, 7.3298e-01, 9.7843e-01, 4.0729e-01, + 9.2998e-02, 4.3465e-01, 3.2636e-01, 9.5106e-02, + 4.8367e-02, 3.1339e-01, 4.7275e-01, 6.9317e-01, + 6.7922e-01, 7.2355e-01, 6.1366e-01, 7.6219e-01, + 2.1995e-01, 3.9216e-01, 8.5252e-01, 7.1761e-01, + 4.5198e-01, 9.8165e-01, 7.6941e-01, 8.2823e-01, + 7.6982e-01, 4.3963e-01, 2.2626e-01, 2.9003e-01, + 7.3718e-01, 8.0941e-01, 4.5213e-01, 1.9323e-01, + 3.6014e-01, 6.7950e-02, 2.6777e-01, 7.5770e-01, + 8.8988e-01, 1.1815e-01, 1.1244e-01, 9.2625e-01, + 7.6156e-01, 9.7142e-01, 2.3564e-01, 3.8882e-01, + 5.9567e-01, 4.8258e-01, 5.5462e-01, 2.7503e-01, + 2.0411e-01, 3.1168e-01, 7.6951e-01, 7.2732e-01, + 4.6023e-02, 4.7740e-01, 9.9557e-01, 7.3789e-02, + 6.2383e-02, 3.5543e-01, 1.8242e-01, 3.6846e-01, + 5.3628e-02, 5.3874e-01, 3.0038e-01, 9.6174e-01, + 
9.6554e-01, 4.7430e-01, 2.2738e-01, 8.6557e-01, + 5.4122e-02, 8.5019e-01, 5.0852e-01, 5.3410e-01, + 1.7285e-01, 5.4149e-01, 8.0869e-01, 6.5103e-01, + 2.7217e-01, 7.0732e-01, 5.5532e-01, 9.9150e-01, + 7.5543e-01, 2.6834e-01, 2.8447e-01, 3.5912e-01, + 4.5601e-01, 7.0765e-01, 6.6949e-01, 5.9725e-01, + 4.8923e-01, 9.9235e-01, 7.6412e-02, 4.1164e-02, + 4.3938e-01, 9.1861e-01, 8.8739e-01, 9.4972e-01, + 4.9408e-01, 4.9347e-01, 3.4149e-01, 5.0322e-01, + 3.1901e-02, 5.2875e-01, 3.3499e-01, 9.5821e-01, + 5.2956e-01, 4.7216e-01, 2.0353e-01, 3.0726e-02, + 5.1848e-01, 2.6131e-01, 8.5289e-02, 4.9542e-01, + 1.5835e-01, 6.7945e-01, 7.8119e-01, 3.4856e-01, + 7.3888e-01, 4.3503e-01, 4.8394e-01, 1.0914e-01, + 5.9027e-01, 7.1288e-01, 9.8329e-01, 5.5542e-02, + 1.2536e-01, 1.9606e-01, 5.4455e-01, 4.3811e-01, + 5.8744e-01, 3.2588e-01, 6.3981e-02, 1.1337e-01, + 5.4324e-01, 8.4644e-01, 5.6165e-02, 5.0125e-01, + 1.5973e-01, 1.8614e-01, 7.8747e-01, 9.1964e-01, + 9.1086e-01, 5.6162e-01, 9.8390e-01, 1.9761e-01, + 4.5863e-01, 7.9353e-01, 3.8658e-02, 1.4135e-01, + 8.1843e-01, 3.0910e-01, 1.5630e-01, 6.8785e-01, + 4.2323e-01, 9.6230e-02, 7.4216e-01, 2.9855e-02, + 3.1890e-01, 2.8569e-01, 1.1579e-01, 7.3771e-01, + 8.3701e-01, 7.5848e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.7970, 0.8043, 0.6125, ..., 0.7108, 0.2175, 0.0136]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.363765478134155 seconds + +[39.35, 38.55, 38.58, 38.65, 38.55, 38.56, 38.58, 38.95, 38.97, 38.84] +[95.12] +13.95901370048523 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 491380, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.363765478134155, 'TIME_S_1KI': 0.021091142248634776, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1327.781383190155, 'W': 95.12} +[39.35, 38.55, 38.58, 38.65, 38.55, 38.56, 38.58, 38.95, 38.97, 38.84, 39.11, 38.56, 38.47, 38.42, 38.44, 38.49, 38.99, 38.76, 39.03, 39.12] +696.76 +34.838 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 491380, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.363765478134155, 'TIME_S_1KI': 0.021091142248634776, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1327.781383190155, 'W': 95.12, 'J_1KI': 2.7021477943549903, 'W_1KI': 0.1935772721722496, 'W_D': 60.282000000000004, 'J_D': 841.4772638926506, 'W_D_1KI': 0.12267898571370427, 'J_D_1KI': 0.00024966214683891136} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.0001.json deleted file mode 100644 index 62f730b..0000000 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 250038, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.432795524597168, "TIME_S_1KI": 0.08971754503154387, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2606.0890457463265, "W": 109.03, "J_1KI": 10.422771921653215, "W_1KI": 0.4360537198345852, "W_D": 73.72525, "J_D": 1762.2174302477242, 
"W_D_1KI": 0.2948561818603573, "J_D_1KI": 0.0011792454821281456} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.0001.output deleted file mode 100644 index 3701848..0000000 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.0001.output +++ /dev/null @@ -1,81 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '30000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.10453343391418457} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 89993, 89997, 90000]), - col_indices=tensor([20651, 24290, 28771, ..., 10287, 15356, 24487]), - values=tensor([0.1253, 0.8320, 0.5079, ..., 0.2152, 0.2753, 0.6533]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.9310, 0.8886, 0.9050, ..., 0.7990, 0.2751, 0.5722]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 0.10453343391418457 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '200892', '-ss', '30000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 16.872318267822266} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 89991, 89992, 90000]), - col_indices=tensor([ 9009, 16842, 24312, ..., 27764, 28622, 29005]), - values=tensor([0.8393, 0.9269, 0.8193, ..., 0.0379, 0.8842, 0.8625]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.6604, 0.9619, 0.4104, ..., 0.2632, 0.2079, 0.2105]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 16.872318267822266 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '250038', '-ss', '30000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.432795524597168} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 6, ..., 89997, 89998, 90000]), - col_indices=tensor([12588, 20450, 20704, ..., 21668, 10676, 12342]), - values=tensor([0.6372, 0.0652, 0.9949, ..., 0.3492, 0.9239, 0.3604]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.0127, 0.8502, 0.1682, ..., 0.0608, 0.3685, 0.2970]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 22.432795524597168 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 6, ..., 89997, 89998, 90000]), - col_indices=tensor([12588, 20450, 20704, ..., 21668, 10676, 12342]), - values=tensor([0.6372, 0.0652, 0.9949, ..., 0.3492, 0.9239, 0.3604]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.0127, 0.8502, 0.1682, ..., 0.0608, 0.3685, 0.2970]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 22.432795524597168 seconds - -[40.69, 39.01, 39.44, 38.94, 38.95, 39.35, 38.98, 39.05, 38.97, 38.87] -[109.03] -23.90249514579773 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 250038, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 90000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.432795524597168, 'TIME_S_1KI': 0.08971754503154387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2606.0890457463265, 'W': 109.03} -[40.69, 39.01, 39.44, 38.94, 38.95, 39.35, 38.98, 39.05, 38.97, 38.87, 40.47, 39.15, 39.52, 39.41, 39.16, 39.78, 39.02, 38.95, 38.92, 38.96] -706.095 -35.30475 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 250038, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 90000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.432795524597168, 'TIME_S_1KI': 0.08971754503154387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2606.0890457463265, 'W': 109.03, 'J_1KI': 10.422771921653215, 'W_1KI': 0.4360537198345852, 'W_D': 73.72525, 'J_D': 1762.2174302477242, 'W_D_1KI': 0.2948561818603573, 'J_D_1KI': 0.0011792454821281456} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.001.json deleted file mode 100644 index e69de29..0000000 diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.001.output deleted file mode 100644 index 8658000..0000000 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_0.001.output +++ /dev/null @@ -1,21 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 
'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '30000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 900000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.1439976692199707} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 29, 67, ..., 899936, 899964, - 900000]), - col_indices=tensor([ 58, 341, 3959, ..., 27670, 28034, 29816]), - values=tensor([0.8286, 0.0691, 0.1730, ..., 0.2645, 0.7295, 0.5386]), - size=(30000, 30000), nnz=900000, layout=torch.sparse_csr) -tensor([0.0558, 0.4553, 0.9674, ..., 0.2366, 0.6209, 0.6160]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 900000 -Density: 0.001 -Time: 0.1439976692199707 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '145835', '-ss', '30000', '-sd', '0.001', '-c', '16'] diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_1e-05.json deleted file mode 100644 index 9b8b9bd..0000000 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 321850, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 20.594725370407104, "TIME_S_1KI": 0.06398858278827747, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2327.1387594389917, "W": 101.41000000000001, "J_1KI": 7.230507253189348, "W_1KI": 0.3150846667702346, "W_D": 65.9145, "J_D": 1512.5942979887725, "W_D_1KI": 0.20479881932577287, "J_D_1KI": 0.0006363175992722476} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_1e-05.output deleted file mode 100644 index e9d1ad8..0000000 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_20_10_10_synthetic_30000_1e-05.output +++ /dev/null @@ -1,81 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '30000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08333611488342285} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
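
Every argv line in these logs follows one template: apptainer run with OMP_PROC_BIND=true and OMP_PLACES={0:16} (pinning the 16 OpenMP threads to cores 0-15), a per-machine .sif image, and the spmv.py invocation, with the Altra and Xeon runs additionally prefixed by numactl --cpunodebind=0 --membind=0. A sketch of how such a command list might be assembled; the actual builder lives in batch.py and is not shown here, so the helper name and structure are assumptions (the Altra runs also wrap the inner command in a single '-c' shell string rather than separate argv entries):

    import subprocess

    def run_spmv(sif, cores, spmv_args, numactl=False):
        # Hypothetical helper mirroring the argv lists at the top of each
        # .output file in this diff.
        cmd = ['apptainer', 'run',
               '--env', 'OMP_PROC_BIND=true',
               '--env', f'OMP_PLACES={{0:{cores}}}',   # e.g. OMP_PLACES={0:16}
               sif]
        inner = ['python3', 'spmv.py', *spmv_args, '-c', str(cores)]
        if numactl:  # Altra/Xeon runs bind CPU and memory to NUMA node 0
            inner = ['numactl', '--cpunodebind=0', '--membind=0', *inner]
        print(cmd + inner)   # matches the first logged line of each run
        return subprocess.run(cmd + inner, capture_output=True, text=True)
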
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9000, 9000, 9000]), - col_indices=tensor([13464, 15002, 12998, ..., 1674, 7890, 9839]), - values=tensor([0.3937, 0.5826, 0.6728, ..., 0.2443, 0.0810, 0.3168]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.3767, 0.3322, 0.0921, ..., 0.4449, 0.8687, 0.6223]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 0.08333611488342285 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '251991', '-ss', '30000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 16.441835403442383} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9000, 9000, 9000]), - col_indices=tensor([ 1592, 26221, 2007, ..., 5499, 7511, 18290]), - values=tensor([0.1009, 0.0773, 0.0762, ..., 0.6540, 0.2265, 0.9524]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.5719, 0.1239, 0.1698, ..., 0.8424, 0.3509, 0.9636]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 16.441835403442383 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '321850', '-ss', '30000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 20.594725370407104} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 9000, 9000, 9000]), - col_indices=tensor([28655, 14046, 22660, ..., 19793, 14001, 26576]), - values=tensor([0.0604, 0.3035, 0.4856, ..., 0.8323, 0.7946, 0.0096]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.2670, 0.6630, 0.3861, ..., 0.4215, 0.9031, 0.7574]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 20.594725370407104 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    1,    1,  ..., 9000, 9000, 9000]),
-       col_indices=tensor([28655, 14046, 22660,  ..., 19793, 14001, 26576]),
-       values=tensor([0.0604, 0.3035, 0.4856,  ..., 0.8323, 0.7946, 0.0096]),
-       size=(30000, 30000), nnz=9000, layout=torch.sparse_csr)
-tensor([0.2670, 0.6630, 0.3861,  ..., 0.4215, 0.9031, 0.7574])
-Matrix Type: synthetic
-Matrix Format: csr
-Shape: torch.Size([30000, 30000])
-Rows: 30000
-Size: 900000000
-NNZ: 9000
-Density: 1e-05
-Time: 20.594725370407104 seconds
-
-[39.56, 39.04, 39.2, 38.58, 39.16, 39.36, 38.83, 40.63, 38.67, 39.16]
-[101.41]
-22.94782328605652
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 321850, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 9000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 20.594725370407104, 'TIME_S_1KI': 0.06398858278827747, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2327.1387594389917, 'W': 101.41000000000001}
-[39.56, 39.04, 39.2, 38.58, 39.16, 39.36, 38.83, 40.63, 38.67, 39.16, 39.61, 44.11, 38.89, 39.27, 38.75, 38.81, 40.71, 38.62, 38.81, 38.61]
-709.91
-35.4955
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 321850, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 9000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 20.594725370407104, 'TIME_S_1KI': 0.06398858278827747, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2327.1387594389917, 'W': 101.41000000000001, 'J_1KI': 7.230507253189348, 'W_1KI': 0.3150846667702346, 'W_D': 65.9145, 'J_D': 1512.5942979887725, 'W_D_1KI': 0.20479881932577287, 'J_D_1KI': 0.0006363175992722476}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json
index 092cf4b..174f326 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 33012, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.519692420959473, "TIME_S_1KI": 0.318662680872394, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1224.664799463749, "W": 88.39, "J_1KI": 37.09756450574788, "W_1KI": 2.677511208045559, "W_D": 72.108, "J_D": 999.0737567567826, "W_D_1KI": 2.184296619411123, "J_D_1KI": 0.06616674601390778}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 32214, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.624319791793823, "TIME_S_1KI": 0.3298044263920601, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1308.5074545145035, "W": 90.38, "J_1KI": 40.619216940290045, "W_1KI": 2.8056124666294155, "W_D": 74.09325, "J_D": 1072.7104442819953, "W_D_1KI": 2.30003259452412, "J_D_1KI": 0.07139854083703111}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output
index d5ef17d..48c062d 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3180568218231201}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3259446620941162}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      7,     17,  ..., 999979,
+tensor(crow_indices=tensor([     0,     11,     22,  ..., 999982,
         999991, 1000000]),
-       col_indices=tensor([10691, 12782, 14246,  ..., 70658, 88202, 93324]),
-       values=tensor([0.3844, 0.6658, 0.7124,  ..., 0.3153, 0.8920, 0.6509]),
+       col_indices=tensor([10285, 14477, 16251,  ..., 79839, 98536, 99886]),
+       values=tensor([0.0755, 0.8469, 0.4749,  ..., 0.2250, 0.2555, 0.2499]),
        size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.9202, 0.9151, 0.8232,  ..., 0.5628, 0.6151, 0.8368])
+tensor([0.5289, 0.3805, 0.4649,  ..., 0.7570, 0.9550, 0.1372])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -16,19 +16,19 @@ Rows: 100000
 Size: 10000000000
 NNZ: 1000000
 Density: 0.0001
-Time: 0.3180568218231201 seconds
+Time: 0.3259446620941162 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33012', '-ss', '100000', '-sd', '0.0001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.519692420959473}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '32214', '-ss', '100000', '-sd', '0.0001', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.624319791793823}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      4,     15,  ..., 999984,
-        999990, 1000000]),
-       col_indices=tensor([ 7405, 49048, 69982,  ..., 87685, 98650, 99933]),
-       values=tensor([0.6053, 0.2022, 0.4562,  ..., 0.3977, 0.5709, 0.7435]),
+tensor(crow_indices=tensor([     0,      9,     15,  ..., 999974,
+        999991, 1000000]),
+       col_indices=tensor([   27,  9769, 50112,  ..., 53126, 61224, 82066]),
+       values=tensor([0.2467, 0.4042, 0.1080,  ..., 0.3359, 0.4921, 0.7955]),
        size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.0490, 0.1129, 0.5767,  ..., 0.3037, 0.9982, 0.0194])
+tensor([0.8754, 0.9877, 0.9510,  ..., 0.4555, 0.1143, 0.3690])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -36,16 +36,16 @@ Rows: 100000
 Size: 10000000000
 NNZ: 1000000
 Density: 0.0001
-Time: 10.519692420959473 seconds
+Time: 10.624319791793823 seconds
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      4,     15,  ..., 999984,
-        999990, 1000000]),
-       col_indices=tensor([ 7405, 49048, 69982,  ..., 87685, 98650, 99933]),
-       values=tensor([0.6053, 0.2022, 0.4562,  ..., 0.3977, 0.5709, 0.7435]),
+tensor(crow_indices=tensor([     0,      9,     15,  ..., 999974,
+        999991, 1000000]),
+       col_indices=tensor([   27,  9769, 50112,  ..., 53126, 61224, 82066]),
+       values=tensor([0.2467, 0.4042, 0.1080,  ..., 0.3359, 0.4921, 0.7955]),
        size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.0490, 0.1129, 0.5767,  ..., 0.3037, 0.9982, 0.0194])
+tensor([0.8754, 0.9877, 0.9510,  ..., 0.4555, 0.1143, 0.3690])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -53,13 +53,13 @@ Rows: 100000
 Size: 10000000000
 NNZ: 1000000
 Density: 0.0001
-Time: 10.519692420959473 seconds
+Time: 10.624319791793823 seconds
 
-[18.25, 17.99, 18.13, 17.81, 18.01, 17.82, 18.18, 18.25, 17.98, 18.75]
-[88.39]
-13.855241537094116
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33012, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.519692420959473, 'TIME_S_1KI': 0.318662680872394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1224.664799463749, 'W': 88.39}
-[18.25, 17.99, 18.13, 17.81, 18.01, 17.82, 18.18, 18.25, 17.98, 18.75, 18.55, 17.94, 18.05, 17.81, 18.35, 17.79, 18.28, 18.36, 18.2, 17.83]
-325.64
-16.282
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33012, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.519692420959473, 'TIME_S_1KI': 0.318662680872394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1224.664799463749, 'W': 88.39, 'J_1KI': 37.09756450574788, 'W_1KI': 2.677511208045559, 'W_D': 72.108, 'J_D': 999.0737567567826, 'W_D_1KI': 2.184296619411123, 'J_D_1KI': 0.06616674601390778}
+[18.22, 17.55, 18.1, 17.52, 17.64, 17.6, 18.51, 17.5, 17.7, 17.75]
+[90.38]
+14.477843046188354
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 32214, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.624319791793823, 'TIME_S_1KI': 0.3298044263920601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1308.5074545145035, 'W': 90.38}
+[18.22, 17.55, 18.1, 17.52, 17.64, 17.6, 18.51, 17.5, 17.7, 17.75, 18.28, 17.75, 17.59, 17.7, 17.88, 17.82, 18.16, 18.33, 22.4, 17.72]
+325.735
+16.28675
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 32214, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.624319791793823, 'TIME_S_1KI': 0.3298044263920601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1308.5074545145035, 'W': 90.38, 'J_1KI': 40.619216940290045, 'W_1KI': 2.8056124666294155, 'W_D': 74.09325, 'J_D': 1072.7104442819953, 'W_D_1KI': 2.30003259452412, 'J_D_1KI': 0.07139854083703111}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json
new file mode 100644
index 0000000..be7a2cb
--- /dev/null
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json
@@ -0,0 +1 @@
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2697, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.332640647888184, "TIME_S_1KI": 3.8311607889833827, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1417.5436856460572, "W": 83.44, "J_1KI": 525.6001800689867, "W_1KI": 30.938079347423063, "W_D": 67.3325, "J_D": 1143.8969344890118, "W_D_1KI": 24.965702632554688, "J_D_1KI": 9.25684191047634}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output
new file mode 100644
index 0000000..d59582d
--- /dev/null
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output
@@ -0,0 +1,65 @@
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.001', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.892810106277466}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([      0,      92,     187,  ..., 9999795,
+                            9999888, 10000000]),
+       col_indices=tensor([ 1843,  1850,  4412,  ..., 98725, 98752, 98846]),
+       values=tensor([0.9343, 0.4740, 0.0577,  ..., 0.9099, 0.1721, 0.4592]),
+       size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.5874, 0.0844, 0.8298,  ..., 0.9009, 0.0712, 0.0168])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([100000, 100000])
+Rows: 100000
+Size: 10000000000
+NNZ: 10000000
+Density: 0.001
+Time: 3.892810106277466 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2697', '-ss', '100000', '-sd', '0.001', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.332640647888184}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([      0,      99,     214,  ..., 9999796,
+                            9999890, 10000000]),
+       col_indices=tensor([  133,   206,   762,  ..., 95508, 95519, 98505]),
+       values=tensor([0.7799, 0.5247, 0.9444,  ..., 0.2262, 0.0403, 0.9029]),
+       size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.3536, 0.8501, 0.0907,  ..., 0.0431, 0.6064, 0.5575])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([100000, 100000])
+Rows: 100000
+Size: 10000000000
+NNZ: 10000000
+Density: 0.001
+Time: 10.332640647888184 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([      0,      99,     214,  ..., 9999796,
+                            9999890, 10000000]),
+       col_indices=tensor([  133,   206,   762,  ..., 95508, 95519, 98505]),
+       values=tensor([0.7799, 0.5247, 0.9444,  ..., 0.2262, 0.0403, 0.9029]),
+       size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.3536, 0.8501, 0.0907,  ..., 0.0431, 0.6064, 0.5575])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([100000, 100000])
+Rows: 100000
+Size: 10000000000
+NNZ: 10000000
+Density: 0.001
+Time: 10.332640647888184 seconds
+
+[18.26, 17.98, 17.82, 17.72, 17.76, 17.93, 17.75, 17.95, 17.83, 18.05]
+[83.44]
+16.988778591156006
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2697, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.332640647888184, 'TIME_S_1KI': 3.8311607889833827, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1417.5436856460572, 'W': 83.44}
+[18.26, 17.98, 17.82, 17.72, 17.76, 17.93, 17.75, 17.95, 17.83, 18.05, 18.55, 18.9, 17.56, 17.66, 17.87, 17.73, 17.82, 17.88, 17.78, 17.56]
+322.15
+16.107499999999998
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2697, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.332640647888184, 'TIME_S_1KI': 3.8311607889833827, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1417.5436856460572, 'W': 83.44, 'J_1KI': 525.6001800689867, 'W_1KI': 30.938079347423063, 'W_D': 67.3325, 'J_D': 1143.8969344890118, 'W_D_1KI': 24.965702632554688, 'J_D_1KI': 9.25684191047634}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json
index d7ba913..60a31fb 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 64591, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.66994047164917, "TIME_S_1KI": 0.16519237156336286, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1159.8280617713929, "W": 82.8, "J_1KI": 17.9564964433341, "W_1KI": 1.2819123407285846, "W_D": 66.57124999999999, "J_D": 932.5024620434641, "W_D_1KI": 1.0306582960474369, "J_D_1KI": 0.015956685854800777}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 63032, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.23182988166809, "TIME_S_1KI": 0.1623275460348409, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1227.4584797358514, "W": 89.17, "J_1KI": 19.473576591824017, "W_1KI": 1.4146782586622668, "W_D": 73.15375, "J_D": 1006.9887940111756, "W_D_1KI": 1.1605811333925626, "J_D_1KI": 0.018412570335584508}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output
index ad36eba..14be47f 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.17906904220581055}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.17937397956848145}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      0,      1,  ...,  99999,  99999,
+tensor(crow_indices=tensor([     0,      0,      4,  ...,  99996, 100000,
         100000]),
-       col_indices=tensor([85471,  5444, 13434,  ..., 17615, 87992, 83918]),
-       values=tensor([0.7119, 0.1219, 0.2242,  ..., 0.7199, 0.3920, 0.9751]),
+       col_indices=tensor([ 6463, 19403, 32975,  ..., 50312, 73566, 75866]),
+       values=tensor([0.6504, 0.4570, 0.8704,  ..., 0.7277, 0.1675, 0.6048]),
        size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.8861, 0.1716, 0.8373,  ..., 0.2826, 0.6276, 0.0027])
+tensor([0.7096, 0.4020, 0.6001,  ..., 0.3911, 0.2531, 0.2591])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -16,19 +16,19 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 0.17906904220581055 seconds
+Time: 0.17937397956848145 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '58636', '-ss', '100000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.531909704208374}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '58536', '-ss', '100000', '-sd', '1e-05', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.751020431518555}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      1,      1,  ...,  99999, 100000,
+tensor(crow_indices=tensor([     0,      1,      2,  ...,  99998,  99998,
         100000]),
-       col_indices=tensor([28875, 86601,  1118,  ..., 53659, 98581, 89346]),
-       values=tensor([0.0170, 0.0837, 0.6677,  ..., 0.0775, 0.7543, 0.4196]),
+       col_indices=tensor([64186, 21974, 57698,  ..., 75952, 18460, 38945]),
+       values=tensor([0.5668, 0.1226, 0.0967,  ..., 0.2541, 0.6343, 0.4356]),
        size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.4702, 0.4277, 0.7376,  ..., 0.9470, 0.3873, 0.6416])
+tensor([0.9872, 0.9595, 0.0420,  ..., 0.0153, 0.9518, 0.5571])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -36,19 +36,19 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 9.531909704208374 seconds
+Time: 9.751020431518555 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '64591', '-ss', '100000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.66994047164917}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '63032', '-ss', '100000', '-sd', '1e-05', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.23182988166809}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      1,      3,  ..., 100000, 100000,
+tensor(crow_indices=tensor([     0,      1,      2,  ...,  99998, 100000,
         100000]),
-       col_indices=tensor([32373, 45973, 94969,  ...,  5823, 12968, 35562]),
-       values=tensor([0.6698, 0.7885, 0.1863,  ..., 0.4943, 0.2796, 0.7613]),
+       col_indices=tensor([35835, 88904, 80345,  ..., 79801,  8127, 81515]),
+       values=tensor([0.8153, 0.8474, 0.9328,  ..., 0.8046, 0.4857, 0.5161]),
        size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.4737, 0.5533, 0.8139,  ..., 0.3662, 0.3156, 0.7007])
+tensor([0.1493, 0.1613, 0.9905,  ..., 0.3209, 0.7704, 0.3686])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -56,16 +56,16 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 10.66994047164917 seconds
+Time: 10.23182988166809 seconds
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      1,      3,  ..., 100000, 100000,
+tensor(crow_indices=tensor([     0,      1,      2,  ...,  99998, 100000,
         100000]),
-       col_indices=tensor([32373, 45973, 94969,  ...,  5823, 12968, 35562]),
-       values=tensor([0.6698, 0.7885, 0.1863,  ..., 0.4943, 0.2796, 0.7613]),
+       col_indices=tensor([35835, 88904, 80345,  ..., 79801,  8127, 81515]),
+       values=tensor([0.8153, 0.8474, 0.9328,  ..., 0.8046, 0.4857, 0.5161]),
        size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.4737, 0.5533, 0.8139,  ..., 0.3662, 0.3156, 0.7007])
+tensor([0.1493, 0.1613, 0.9905,  ..., 0.3209, 0.7704, 0.3686])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -73,13 +73,13 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 10.66994047164917 seconds
+Time: 10.23182988166809 seconds
 
-[18.59, 17.9, 18.34, 17.96, 18.14, 17.94, 18.32, 17.79, 17.82, 17.71]
-[82.8]
-14.007585287094116
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64591, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.66994047164917, 'TIME_S_1KI': 0.16519237156336286, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1159.8280617713929, 'W': 82.8}
-[18.59, 17.9, 18.34, 17.96, 18.14, 17.94, 18.32, 17.79, 17.82, 17.71, 18.29, 17.85, 18.6, 17.8, 18.13, 17.74, 17.99, 17.83, 18.16, 17.94]
-324.57500000000005
-16.22875
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64591, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.66994047164917, 'TIME_S_1KI': 0.16519237156336286, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1159.8280617713929, 'W': 82.8, 'J_1KI': 17.9564964433341, 'W_1KI': 1.2819123407285846, 'W_D': 66.57124999999999, 'J_D': 932.5024620434641, 'W_D_1KI': 1.0306582960474369, 'J_D_1KI': 0.015956685854800777}
+[18.1, 17.83, 17.83, 17.64, 17.89, 17.76, 17.87, 17.83, 17.97, 17.52]
+[89.17]
+13.765374898910522
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 63032, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.23182988166809, 'TIME_S_1KI': 0.1623275460348409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1227.4584797358514, 'W': 89.17}
+[18.1, 17.83, 17.83, 17.64, 17.89, 17.76, 17.87, 17.83, 17.97, 17.52, 18.59, 17.72, 17.71, 17.51, 17.74, 17.62, 17.64, 18.03, 17.89, 17.48]
+320.32500000000005
+16.016250000000003
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 63032, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.23182988166809, 'TIME_S_1KI': 0.1623275460348409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1227.4584797358514, 'W': 89.17, 'J_1KI': 19.473576591824017, 'W_1KI': 1.4146782586622668, 'W_D': 73.15375, 'J_D': 1006.9887940111756, 'W_D_1KI': 1.1605811333925626, 'J_D_1KI': 0.018412570335584508}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json
index c1f8ab6..05a227c 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 250193, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.988550901412964, "TIME_S_1KI": 0.043920297136262665, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1067.7657339859009, "W": 74.48, "J_1KI": 4.267768218878628, "W_1KI": 0.29769018317858614, "W_D": 58.048, "J_D": 832.1920693664551, "W_D_1KI": 0.23201288605196788, "J_D_1KI": 0.0009273356410929478}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 253876, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.530851364135742, "TIME_S_1KI": 0.041480294963429955, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1171.4377240467072, "W": 80.68, "J_1KI": 4.614212150997759, "W_1KI": 0.317792938284832, "W_D": 64.302, "J_D": 933.636446847439, "W_D_1KI": 0.2532811293702437, "J_D_1KI": 0.0009976568457445514}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output
index 15dcb35..04a16aa 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output
@@ -1,13 +1,13 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.06029987335205078}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05977439880371094}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([    0,     0,     1,  ...,  9998,  9999, 10000]),
-       col_indices=tensor([9584, 2249, 9621,  ...,  267, 2843, 1232]),
-       values=tensor([0.1887, 0.8280, 0.8733,  ..., 0.6422, 0.8241, 0.9503]),
+tensor(crow_indices=tensor([    0,     0,     2,  ...,  9999,  9999, 10000]),
+       col_indices=tensor([6615, 8991, 2810,  ..., 6295, 8510, 7610]),
+       values=tensor([0.3885, 0.8426, 0.7862,  ..., 0.5955, 0.1672, 0.2063]),
        size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.2203, 0.8610, 0.9153,  ..., 0.2931, 0.9983, 0.3156])
+tensor([0.1595, 0.0624, 0.6993,  ..., 0.5987, 0.7271, 0.9533])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -15,18 +15,18 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 0.06029987335205078 seconds
+Time: 0.05977439880371094 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '174129', '-ss', '10000', '-sd', '0.0001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.307769536972046}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '175660', '-ss', '10000', '-sd', '0.0001', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.265056371688843}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([    0,     1,     1,  ..., 10000, 10000, 10000]),
-       col_indices=tensor([5050, 9096,  467,  ..., 6460, 6547, 2963]),
-       values=tensor([0.3312, 0.9984, 0.8182,  ..., 0.5509, 0.3722, 0.7285]),
+tensor(crow_indices=tensor([    0,     0,     0,  ...,  9997,  9998, 10000]),
+       col_indices=tensor([6157, 6465, 6955,  ..., 9189, 5553, 9168]),
+       values=tensor([0.9492, 0.4977, 0.7776,  ..., 0.2833, 0.2034, 0.6430]),
        size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.0543, 0.3720, 0.3677,  ..., 0.5280, 0.6433, 0.3148])
+tensor([0.2429, 0.7570, 0.9101,  ..., 0.6676, 0.5300, 0.9328])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -34,18 +34,18 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 7.307769536972046 seconds
+Time: 7.265056371688843 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '250193', '-ss', '10000', '-sd', '0.0001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.988550901412964}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '253876', '-ss', '10000', '-sd', '0.0001', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.530851364135742}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([    0,     0,     2,  ...,  9997,  9999, 10000]),
-       col_indices=tensor([4233, 4275, 7541,  ..., 2248, 7833,  717]),
-       values=tensor([0.0347, 0.7995, 0.4404,  ..., 0.0217, 0.2651, 0.9390]),
+tensor(crow_indices=tensor([    0,     0,     0,  ...,  9998,  9999, 10000]),
+       col_indices=tensor([ 868, 4014, 6169,  ..., 4688, 7367, 6538]),
+       values=tensor([0.9131, 0.0133, 0.5134,  ..., 0.5757, 0.9187, 0.1463]),
        size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.4739, 0.4789, 0.6628,  ..., 0.7267, 0.9323, 0.5704])
+tensor([0.7710, 0.0750, 0.1717,  ..., 0.8123, 0.4992, 0.1144])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -53,15 +53,15 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 10.988550901412964 seconds
+Time: 10.530851364135742 seconds
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([    0,     0,     2,  ...,  9997,  9999, 10000]),
-       col_indices=tensor([4233, 4275, 7541,  ..., 2248, 7833,  717]),
-       values=tensor([0.0347, 0.7995, 0.4404,  ..., 0.0217, 0.2651, 0.9390]),
+tensor(crow_indices=tensor([    0,     0,     0,  ...,  9998,  9999, 10000]),
+       col_indices=tensor([ 868, 4014, 6169,  ..., 4688, 7367, 6538]),
+       values=tensor([0.9131, 0.0133, 0.5134,  ..., 0.5757, 0.9187, 0.1463]),
        size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.4739, 0.4789, 0.6628,  ..., 0.7267, 0.9323, 0.5704])
+tensor([0.7710, 0.0750, 0.1717,  ..., 0.8123, 0.4992, 0.1144])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -69,13 +69,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 10.988550901412964 seconds
+Time: 10.530851364135742 seconds
 
-[18.5, 18.04, 18.08, 20.55, 18.03, 18.27, 18.34, 17.92, 18.14, 18.0]
-[74.48]
-14.33627462387085
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 250193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.988550901412964, 'TIME_S_1KI': 0.043920297136262665, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1067.7657339859009, 'W': 74.48}
-[18.5, 18.04, 18.08, 20.55, 18.03, 18.27, 18.34, 17.92, 18.14, 18.0, 18.31, 18.29, 18.5, 18.09, 18.0, 17.95, 17.89, 18.08, 18.14, 17.85]
-328.64
-16.432
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 250193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.988550901412964, 'TIME_S_1KI': 0.043920297136262665, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1067.7657339859009, 'W': 74.48, 'J_1KI': 4.267768218878628, 'W_1KI': 0.29769018317858614, 'W_D': 58.048, 'J_D': 832.1920693664551, 'W_D_1KI': 0.23201288605196788, 'J_D_1KI': 0.0009273356410929478}
+[18.32, 17.72, 17.73, 18.28, 18.02, 18.18, 17.93, 18.12, 17.89, 21.22]
+[80.68]
+14.51955533027649
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253876, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.530851364135742, 'TIME_S_1KI': 0.041480294963429955, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1171.4377240467072, 'W': 80.68}
+[18.32, 17.72, 17.73, 18.28, 18.02, 18.18, 17.93, 18.12, 17.89, 21.22, 18.16, 17.68, 17.59, 17.54, 18.03, 22.12, 17.56, 17.65, 17.87, 17.6]
+327.56000000000006
+16.378000000000004
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253876, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.530851364135742, 'TIME_S_1KI': 0.041480294963429955, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1171.4377240467072, 'W': 80.68, 'J_1KI': 4.614212150997759, 'W_1KI': 0.317792938284832, 'W_D': 64.302, 'J_D': 933.636446847439, 'W_D_1KI': 0.2532811293702437, 'J_D_1KI': 0.0009976568457445514}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json
index dab286f..fdd9d92 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 186516, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.228839635848999, "TIME_S_1KI": 0.054841620214078145, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1075.814607758522, "W": 79.58, "J_1KI": 5.767948099672533, "W_1KI": 0.4266658088314139, "W_D": 63.054, "J_D": 852.4053063282967, "W_D_1KI": 0.338062150164061, "J_D_1KI": 0.0018125101876732344}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 195071, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.382015228271484, "TIME_S_1KI": 0.05322172556798029, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1196.0013425803186, "W": 86.31, "J_1KI": 6.131107866265712, "W_1KI": 0.4424542858753992, "W_D": 70.10050000000001, "J_D": 971.3856113492252, "W_D_1KI": 0.35935890009278676, "J_D_1KI": 0.0018421954062509895}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output
index fad6e92..5d9701a 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0709388256072998}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06870174407958984}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,      9,     17,  ...,  99974,  99988,
+tensor(crow_indices=tensor([     0,     10,     25,  ...,  99981,  99995,
         100000]),
-       col_indices=tensor([1106, 1398, 2518,  ..., 6886, 7547, 8173]),
-       values=tensor([0.5902, 0.0057, 0.8492,  ..., 0.2608, 0.7269, 0.6940]),
+       col_indices=tensor([   3,  150,  370,  ..., 2691, 9535, 9749]),
+       values=tensor([0.2561, 0.9230, 0.8831,  ..., 0.2203, 0.7623, 0.4185]),
        size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.8144, 0.0674, 0.1585,  ..., 0.0850, 0.2846, 0.5370])
+tensor([0.1427, 0.1860, 0.4972,  ..., 0.5058, 0.8744, 0.6551])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -16,19 +16,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 0.0709388256072998 seconds
+Time: 0.06870174407958984 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '148014', '-ss', '10000', '-sd', '0.001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.332475900650024}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '152834', '-ss', '10000', '-sd', '0.001', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.226486682891846}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,     11,     17,  ...,  99965,  99978,
+tensor(crow_indices=tensor([     0,      9,     17,  ...,  99982,  99990,
         100000]),
-       col_indices=tensor([  77,  628, 3642,  ..., 8176, 8481, 9600]),
-       values=tensor([0.7580, 0.3721, 0.0885,  ..., 0.9345, 0.1388, 0.5730]),
+       col_indices=tensor([ 560, 3215, 3961,  ..., 6911, 7414, 7504]),
+       values=tensor([0.0904, 0.0706, 0.8224,  ..., 0.0963, 0.3127, 0.0052]),
        size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.9678, 0.5744, 0.4262,  ..., 0.2115, 0.3242, 0.5272])
+tensor([0.8141, 0.4563, 0.6350,  ..., 0.0924, 0.8861, 0.1694])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -36,19 +36,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 8.332475900650024 seconds
+Time: 8.226486682891846 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '186516', '-ss', '10000', '-sd', '0.001', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.228839635848999}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '195071', '-ss', '10000', '-sd', '0.001', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.382015228271484}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,     15,     30,  ...,  99982,  99990,
+tensor(crow_indices=tensor([     0,      4,     10,  ...,  99988,  99994,
         100000]),
-       col_indices=tensor([ 298,  367, 1190,  ..., 3689, 6850, 7173]),
-       values=tensor([0.7086, 0.6908, 0.8648,  ..., 0.4576, 0.3199, 0.8368]),
+       col_indices=tensor([1742, 3653, 4110,  ..., 7414, 9186, 9217]),
+       values=tensor([0.4393, 0.0633, 0.6988,  ..., 0.9636, 0.3600, 0.6461]),
        size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.7366, 0.0593, 0.8663,  ..., 0.2557, 0.4256, 0.5242])
+tensor([0.5362, 0.5145, 0.3988,  ..., 0.1543, 0.7121, 0.2032])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -56,16 +56,16 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 10.228839635848999 seconds
+Time: 10.382015228271484 seconds
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([     0,     15,     30,  ...,  99982,  99990,
+tensor(crow_indices=tensor([     0,      4,     10,  ...,  99988,  99994,
         100000]),
-       col_indices=tensor([ 298,  367, 1190,  ..., 3689, 6850, 7173]),
-       values=tensor([0.7086, 0.6908, 0.8648,  ..., 0.4576, 0.3199, 0.8368]),
+       col_indices=tensor([1742, 3653, 4110,  ..., 7414, 9186, 9217]),
+       values=tensor([0.4393, 0.0633, 0.6988,  ..., 0.9636, 0.3600, 0.6461]),
        size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.7366, 0.0593, 0.8663,  ..., 0.2557, 0.4256, 0.5242])
+tensor([0.5362, 0.5145, 0.3988,  ..., 0.1543, 0.7121, 0.2032])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -73,13 +73,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 10.228839635848999 seconds
+Time: 10.382015228271484 seconds
 
-[18.35, 18.1, 18.16, 18.05, 17.94, 18.34, 18.01, 17.89, 17.93, 17.71]
-[79.58]
-13.51865553855896
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 186516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.228839635848999, 'TIME_S_1KI': 0.054841620214078145, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1075.814607758522, 'W': 79.58}
-[18.35, 18.1, 18.16, 18.05, 17.94, 18.34, 18.01, 17.89, 17.93, 17.71, 19.07, 18.27, 18.23, 17.94, 18.27, 18.14, 21.36, 18.53, 18.63, 18.33]
-330.52
-16.526
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 186516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.228839635848999, 'TIME_S_1KI': 0.054841620214078145, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1075.814607758522, 'W': 79.58, 'J_1KI': 5.767948099672533, 'W_1KI': 0.4266658088314139, 'W_D': 63.054, 'J_D': 852.4053063282967, 'W_D_1KI': 0.338062150164061, 'J_D_1KI': 0.0018125101876732344}
+[18.32, 18.17, 18.0, 18.26, 17.83, 17.86, 19.18, 17.87, 17.68, 18.06]
+[86.31]
+13.85704255104065
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195071, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.382015228271484, 'TIME_S_1KI': 0.05322172556798029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1196.0013425803186, 'W': 86.31}
+[18.32, 18.17, 18.0, 18.26, 17.83, 17.86, 19.18, 17.87, 17.68, 18.06, 18.29, 17.98, 17.81, 17.75, 18.0, 18.2, 17.75, 17.66, 17.87, 17.97]
+324.18999999999994
+16.2095
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195071, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.382015228271484, 'TIME_S_1KI': 0.05322172556798029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1196.0013425803186, 'W': 86.31, 'J_1KI': 6.131107866265712, 'W_1KI': 0.4424542858753992, 'W_D': 70.10050000000001, 'J_D': 971.3856113492252, 'W_D_1KI': 0.35935890009278676, 'J_D_1KI': 0.0018421954062509895}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json
index 27eac6d..5933eb4 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 57497, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.399010181427002, "TIME_S_1KI": 0.18086178724849997, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1216.1904487371446, "W": 87.17000000000002, "J_1KI": 21.15224183413299, "W_1KI": 1.5160790997791191, "W_D": 70.89300000000001, "J_D": 989.0947514319422, "W_D_1KI": 1.2329860688383745, "J_D_1KI": 0.021444354815701245}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 53507, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.015070676803589, "TIME_S_1KI": 0.18717309280661576, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1202.573525466919, "W": 88.64, "J_1KI": 22.47506915855718, "W_1KI": 1.6566056777617881, "W_D": 72.22725, "J_D": 979.9027376723885, "W_D_1KI": 1.3498654381669688, "J_D_1KI": 0.02522782884794455}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output
index 98e81fe..d725fe1 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.01', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.1964414119720459}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.19623374938964844}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([      0,     102,     210,  ...,  999804,
-                             999909, 1000000]),
-       col_indices=tensor([   4,  297,  328,  ..., 9417, 9717, 9744]),
-       values=tensor([0.3827, 0.2830, 0.2497,  ..., 0.1291, 0.2102, 0.5312]),
+tensor(crow_indices=tensor([      0,     102,     197,  ...,  999814,
+                             999918, 1000000]),
+       col_indices=tensor([  21,  221,  266,  ..., 9711, 9962, 9983]),
+       values=tensor([0.8240, 0.1342, 0.9347,  ..., 0.9531, 0.8710, 0.7315]),
        size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.7948, 0.9855, 0.6473,  ..., 0.4205, 0.5296, 0.9253])
+tensor([0.2953, 0.0740, 0.7231,  ..., 0.2507, 0.0704, 0.5422])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -16,19 +16,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000000
 Density: 0.01
-Time: 0.1964414119720459 seconds
+Time: 0.19623374938964844 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53451', '-ss', '10000', '-sd', '0.01', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.761078357696533}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53507', '-ss', '10000', '-sd', '0.01', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.015070676803589}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([      0,     112,     220,  ...,  999796,
-                             999898, 1000000]),
-       col_indices=tensor([ 465,  658,  715,  ..., 9500, 9653, 9927]),
-       values=tensor([0.9513, 0.9158, 0.4499,  ..., 0.0775, 0.2496, 0.9759]),
+tensor(crow_indices=tensor([      0,     117,     202,  ...,  999813,
+                             999911, 1000000]),
+       col_indices=tensor([ 101,  231,  245,  ..., 9677, 9872, 9873]),
+       values=tensor([0.6066, 0.1771, 0.9671,  ..., 0.7083, 0.4630, 0.7862]),
        size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.5799, 0.5098, 0.6156,  ..., 0.8166, 0.2331, 0.2979])
+tensor([0.1666, 0.0462, 0.0015,  ..., 0.3047, 0.2438, 0.6174])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -36,19 +36,16 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000000
 Density: 0.01
-Time: 9.761078357696533 seconds
-
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '57497', '-ss', '10000', '-sd', '0.01', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.399010181427002}
+Time: 10.015070676803589 seconds
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([      0,     104,     198,  ...,  999802,
-                             999905, 1000000]),
-       col_indices=tensor([ 124,  157,  187,  ..., 9539, 9601, 9680]),
-       values=tensor([0.6532, 0.0603, 0.0418,  ..., 0.1935, 0.1125, 0.4778]),
+tensor(crow_indices=tensor([      0,     117,     202,  ...,  999813,
+                             999911, 1000000]),
+       col_indices=tensor([ 101,  231,  245,  ..., 9677, 9872, 9873]),
+       values=tensor([0.6066, 0.1771, 0.9671,  ..., 0.7083, 0.4630, 0.7862]),
        size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.5307, 0.8097, 0.3092,  ..., 0.4937, 0.1856, 0.7516])
+tensor([0.1666, 0.0462, 0.0015,  ..., 0.3047, 0.2438, 0.6174])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -56,30 +53,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000000
 Density: 0.01
-Time: 10.399010181427002 seconds
+Time: 10.015070676803589 seconds
 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
- matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([      0,     104,     198,  ...,  999802,
-                             999905, 1000000]),
-       col_indices=tensor([ 124,  157,  187,  ..., 9539, 9601, 9680]),
-       values=tensor([0.6532, 0.0603, 0.0418,  ..., 0.1935, 0.1125, 0.4778]),
-       size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.5307, 0.8097, 0.3092,  ..., 0.4937, 0.1856, 0.7516])
-Matrix Type: synthetic
-Matrix Format: csr
-Shape: torch.Size([10000, 10000])
-Rows: 10000
-Size: 100000000
-NNZ: 1000000
-Density: 0.01
-Time: 10.399010181427002 seconds
-
-[18.23, 18.13, 17.96, 18.03, 17.94, 17.9, 17.91, 17.8, 18.15, 18.93]
-[87.17]
-13.951938152313232
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.399010181427002, 'TIME_S_1KI': 0.18086178724849997, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1216.1904487371446, 'W': 87.17000000000002}
-[18.23, 18.13, 17.96, 18.03, 17.94, 17.9, 17.91, 17.8, 18.15, 18.93, 18.34, 17.87, 18.21, 18.15, 18.42, 17.87, 18.22, 18.25, 18.03, 17.9]
-325.54
-16.277
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.399010181427002, 'TIME_S_1KI': 0.18086178724849997, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1216.1904487371446, 'W': 87.17000000000002, 'J_1KI': 21.15224183413299, 'W_1KI': 1.5160790997791191, 'W_D': 70.89300000000001, 'J_D': 989.0947514319422, 'W_D_1KI': 1.2329860688383745, 'J_D_1KI': 0.021444354815701245}
+[18.69, 17.91, 17.85, 17.85, 22.65, 17.8, 18.01, 17.72, 17.94, 17.78]
+[88.64]
+13.56693959236145
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 53507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.015070676803589, 'TIME_S_1KI': 0.18717309280661576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1202.573525466919, 'W': 88.64}
+[18.69, 17.91, 17.85, 17.85, 22.65, 17.8, 18.01, 17.72, 17.94, 17.78, 20.07, 19.06, 18.03, 17.54, 17.79, 18.03, 17.73, 17.66, 17.6, 17.63]
+328.255
+16.41275
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 53507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.015070676803589, 'TIME_S_1KI': 0.18717309280661576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1202.573525466919, 'W': 88.64, 'J_1KI': 22.47506915855718, 'W_1KI': 1.6566056777617881, 'W_D': 72.22725, 'J_D': 979.9027376723885, 'W_D_1KI': 1.3498654381669688, 'J_D_1KI': 0.02522782884794455}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json
index ae32267..31ef3d6 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9007, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.744792222976685, "TIME_S_1KI": 1.192937961915919, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1359.414791688919, "W": 84.86, "J_1KI": 150.92869897734195, "W_1KI": 9.421561008104806, "W_D": 68.55725000000001, "J_D": 1098.2528838971855, "W_D_1KI": 7.611552126124127, "J_D_1KI": 0.8450707367740786}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8765, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.565605640411377, "TIME_S_1KI": 1.2054313337605678, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1349.8730677318572, "W": 85.96, "J_1KI": 154.0071954058023, "W_1KI": 9.807187678265828, "W_D": 69.70649999999999, "J_D": 1094.636191203475, "W_D_1KI": 7.952823730747289, "J_D_1KI": 0.9073387028804666}
diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output
index c9ec716..34eb983 100644
--- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.1656646728515625}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.1979267597198486}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state.
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 506, 991, ..., 4998989, - 4999492, 5000000]), - col_indices=tensor([ 25, 30, 53, ..., 9970, 9993, 9995]), - values=tensor([0.0157, 0.5603, 0.3033, ..., 0.4419, 0.2413, 0.9606]), +tensor(crow_indices=tensor([ 0, 503, 997, ..., 4999030, + 4999508, 5000000]), + col_indices=tensor([ 13, 17, 19, ..., 9920, 9929, 9953]), + values=tensor([0.9385, 0.6026, 0.1531, ..., 0.7529, 0.2170, 0.3875]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.4291, 0.9468, 0.9558, ..., 0.3375, 0.0455, 0.9666]) +tensor([0.6172, 0.1221, 0.7807, ..., 0.3915, 0.5006, 0.2223]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 1.1656646728515625 seconds +Time: 1.1979267597198486 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9007', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.744792222976685} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8765', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.565605640411377} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 491, 1013, ..., 4998981, - 4999517, 5000000]), - col_indices=tensor([ 61, 62, 77, ..., 9979, 9982, 9988]), - values=tensor([0.6511, 0.9070, 0.7175, ..., 0.4257, 0.4784, 0.0096]), +tensor(crow_indices=tensor([ 0, 511, 969, ..., 4998985, + 4999485, 5000000]), + col_indices=tensor([ 18, 30, 44, ..., 9958, 9974, 9994]), + values=tensor([0.9183, 0.2043, 0.3929, ..., 0.1798, 0.2421, 0.5984]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7046, 0.2172, 0.5779, ..., 0.4690, 0.0165, 0.6122]) +tensor([0.9280, 0.7586, 0.0981, ..., 0.8069, 0.8205, 0.0580]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.744792222976685 seconds +Time: 10.565605640411377 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 491, 1013, ..., 4998981, - 4999517, 5000000]), - col_indices=tensor([ 61, 62, 77, ..., 9979, 9982, 9988]), - values=tensor([0.6511, 0.9070, 0.7175, ..., 0.4257, 0.4784, 0.0096]), +tensor(crow_indices=tensor([ 0, 511, 969, ..., 4998985, + 4999485, 5000000]), + col_indices=tensor([ 18, 30, 44, ..., 9958, 9974, 9994]), + values=tensor([0.9183, 0.2043, 0.3929, ..., 0.1798, 0.2421, 0.5984]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7046, 0.2172, 0.5779, ..., 0.4690, 0.0165, 0.6122]) +tensor([0.9280, 0.7586, 0.0981, ..., 0.8069, 0.8205, 0.0580]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.744792222976685 seconds +Time: 10.565605640411377 seconds -[18.37, 18.68, 18.13, 17.9, 18.06, 18.22, 18.04, 18.49, 17.9, 18.1] -[84.86] -16.019500255584717 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9007, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.744792222976685, 'TIME_S_1KI': 1.192937961915919, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1359.414791688919, 'W': 84.86} -[18.37, 18.68, 18.13, 17.9, 18.06, 18.22, 18.04, 18.49, 17.9, 18.1, 18.25, 17.97, 18.05, 17.84, 17.92, 18.2, 17.96, 17.89, 18.19, 18.51] -326.05499999999995 -16.302749999999996 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9007, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.744792222976685, 'TIME_S_1KI': 1.192937961915919, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1359.414791688919, 'W': 84.86, 'J_1KI': 150.92869897734195, 'W_1KI': 9.421561008104806, 'W_D': 68.55725000000001, 'J_D': 1098.2528838971855, 'W_D_1KI': 7.611552126124127, 'J_D_1KI': 0.8450707367740786} +[18.27, 17.87, 17.87, 17.65, 18.34, 18.87, 17.99, 17.76, 18.35, 17.83] +[85.96] +15.703502416610718 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8765, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.565605640411377, 'TIME_S_1KI': 1.2054313337605678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1349.8730677318572, 'W': 85.96} +[18.27, 17.87, 17.87, 17.65, 18.34, 18.87, 17.99, 17.76, 18.35, 17.83, 18.43, 17.64, 18.08, 18.62, 18.12, 17.89, 17.87, 17.65, 18.18, 18.11] +325.07 +16.2535 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8765, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.565605640411377, 'TIME_S_1KI': 1.2054313337605678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1349.8730677318572, 'W': 85.96, 'J_1KI': 154.0071954058023, 'W_1KI': 9.807187678265828, 'W_D': 69.70649999999999, 'J_D': 1094.636191203475, 'W_D_1KI': 7.952823730747289, 'J_D_1KI': 0.9073387028804666} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..916740d --- /dev/null +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2843, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.2704017162323, "TIME_S_1KI": 3.6125225874893774, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1409.3875760412218, "W": 82.18, "J_1KI": 495.73956244854793, "W_1KI": 28.90608512135069, "W_D": 66.09875000000001, "J_D": 1133.5940258196, "W_D_1KI": 23.249648258881464, "J_D_1KI": 8.177857284165128} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..f77d6ca --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 3.6929545402526855} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 984, 2024, ..., 9998063, + 9998995, 10000000]), + col_indices=tensor([ 8, 13, 17, ..., 9976, 9985, 9991]), + values=tensor([0.9364, 0.6574, 0.1385, ..., 0.6834, 0.0920, 0.4928]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2645, 0.6514, 0.1258, ..., 0.8959, 0.1836, 0.1827]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 3.6929545402526855 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2843', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.2704017162323} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 979, 2005, ..., 9997991, + 9998986, 10000000]), + col_indices=tensor([ 33, 43, 63, ..., 9975, 9988, 9994]), + values=tensor([0.7459, 0.7397, 0.9950, ..., 0.6626, 0.6614, 0.8057]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.0776, 0.4434, 0.3294, ..., 0.9636, 0.8443, 0.5700]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.2704017162323 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 979, 2005, ..., 9997991, + 9998986, 10000000]), + col_indices=tensor([ 33, 43, 63, ..., 9975, 9988, 9994]), + values=tensor([0.7459, 0.7397, 0.9950, ..., 0.6626, 0.6614, 0.8057]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.0776, 0.4434, 0.3294, ..., 0.9636, 0.8443, 0.5700]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.2704017162323 seconds + +[18.3, 17.96, 17.98, 17.52, 17.65, 17.88, 17.7, 17.55, 17.72, 17.81] +[82.18] +17.150007009506226 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.2704017162323, 'TIME_S_1KI': 3.6125225874893774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1409.3875760412218, 'W': 82.18} +[18.3, 17.96, 17.98, 17.52, 17.65, 17.88, 17.7, 17.55, 17.72, 17.81, 18.29, 17.77, 17.64, 17.69, 18.52, 18.77, 17.66, 17.68, 17.94, 17.59] +321.625 +16.08125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.2704017162323, 'TIME_S_1KI': 3.6125225874893774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1409.3875760412218, 'W': 82.18, 'J_1KI': 495.73956244854793, 'W_1KI': 28.90608512135069, 'W_D': 66.09875000000001, 'J_D': 1133.5940258196, 'W_D_1KI': 23.249648258881464, 'J_D_1KI': 8.177857284165128} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json index 3d91d0f..a7f50f8 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 279705, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.192691802978516, "TIME_S_1KI": 0.03644086377783206, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1002.2923643112182, "W": 73.24, "J_1KI": 3.5833909451429835, "W_1KI": 0.2618473034089487, "W_D": 56.983999999999995, "J_D": 779.8283463668822, "W_D_1KI": 
0.20372892869272982, "J_D_1KI": 0.0007283707073263969} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 286739, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.441989183425903, "TIME_S_1KI": 0.036416354885194915, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1103.374533557892, "W": 79.9, "J_1KI": 3.8480099796605693, "W_1KI": 0.27865061955297327, "W_D": 63.605500000000006, "J_D": 878.3565568737985, "W_D_1KI": 0.22182367937392544, "J_D_1KI": 0.0007736083315277149} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output index d0774e1..03249c3 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,373 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05549430847167969} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05496048927307129} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([3370, 8033, 9994, 2466, 6901, 8760, 7929, 6009, 6694, - 5159, 1569, 4321, 2884, 3593, 7087, 277, 9865, 708, - 219, 1957, 2394, 9752, 9341, 4634, 7106, 8670, 5732, - 279, 8294, 2115, 4730, 6630, 1087, 3467, 99, 364, - 8115, 4267, 1834, 5621, 1569, 7117, 9388, 1669, 5931, - 9264, 3811, 5721, 3727, 135, 5730, 2995, 3406, 9737, - 8203, 4619, 3682, 7347, 200, 8973, 7753, 580, 2253, - 5338, 9810, 8027, 181, 7440, 8883, 5987, 8971, 592, - 4310, 5459, 5555, 5982, 2912, 5657, 5155, 5158, 2575, - 4534, 5426, 285, 2313, 564, 416, 9640, 2595, 4194, - 651, 1798, 5551, 7426, 7258, 3436, 2400, 6562, 5104, - 7010, 536, 2620, 9757, 68, 4487, 1288, 1752, 3582, - 4320, 2874, 3544, 5364, 8870, 570, 876, 9095, 9069, - 7054, 4172, 1984, 9030, 5728, 1404, 5844, 3846, 641, - 8291, 9336, 3061, 3478, 6348, 6870, 170, 9681, 4066, - 161, 4454, 1223, 5179, 2812, 3556, 5193, 2967, 5978, - 997, 659, 500, 7339, 9371, 8915, 9451, 2693, 6998, - 4462, 9723, 8519, 528, 8686, 4869, 1134, 5722, 3684, - 5254, 2008, 3928, 3524, 7467, 5611, 9303, 1848, 7513, - 144, 6865, 9315, 260, 6542, 8384, 7775, 9566, 1870, - 5432, 4007, 6545, 8496, 7112, 7663, 8511, 3654, 8360, - 7458, 7244, 9547, 237, 7595, 3067, 3897, 1148, 6044, - 9905, 3252, 3717, 2097, 8397, 5162, 880, 3643, 3669, - 3006, 4680, 1145, 5124, 2016, 4795, 1874, 2179, 4471, - 5866, 4135, 4797, 3701, 1368, 8916, 2162, 8724, 3185, - 2438, 7109, 8896, 7886, 7935, 5984, 6692, 9779, 356, - 8330, 7674, 7049, 1894, 9291, 438, 644, 5593, 4012, - 4092, 1074, 7656, 8497, 9936, 1003, 1428, 4946, 3098, - 3834, 5854, 9677, 3180, 1592, 4702, 1405, 2278, 844, - 3771, 581, 700, 6434, 4124, 9326, 859, 5258, 8507, - 5298, 8386, 9430, 4702, 2123, 1428, 3971, 9516, 3171, - 631, 9941, 183, 4223, 8243, 8550, 4681, 1680, 6085, - 9231, 7887, 2007, 9929, 319, 2262, 7173, 8741, 4368, - 7062, 7568, 7422, 6915, 6947, 8441, 2961, 6342, 2044, - 1447, 9597, 985, 8998, 9652, 4424, 5209, 960, 6967, - 167, 332, 8301, 5318, 8462, 9782, 1436, 5255, 8291, - 1679, 100, 7990, 7641, 5493, 6378, 4691, 7683, 2088, - 3372, 8060, 1057, 3230, 8284, 1196, 9763, 1017, 415, - 4982, 6524, 8257, 2213, 7932, 3442, 8568, 3619, 2201, - 8218, 4246, 4441, 4982, 2118, 427, 6086, 1745, 4209, - 7427, 2079, 9082, 6939, 9748, 4182, 4214, 8354, 8651, - 3029, 8143, 5135, 9139, 422, 6584, 4206, 3350, 3991, - 580, 2652, 5057, 3635, 5440, 7829, 2912, 7163, 3688, - 695, 1097, 7502, 9778, 6390, 2778, 5858, 8301, 3489, - 4549, 1884, 825, 5427, 443, 8921, 1023, 9620, 3942, - 4314, 675, 963, 9852, 4203, 3468, 8239, 870, 8839, - 968, 4232, 4985, 3353, 73, 2459, 5154, 1956, 1327, - 6990, 1883, 4040, 9749, 9910, 4672, 5452, 4110, 9189, - 5479, 140, 5133, 1656, 2426, 8412, 8268, 6211, 4670, - 4046, 220, 7783, 3136, 5529, 1519, 5090, 4609, 7290, - 6130, 3587, 3942, 1229, 552, 8186, 3250, 8456, 5207, - 6427, 6375, 7492, 5408, 4660, 134, 3661, 13, 8254, - 5276, 4385, 9507, 9434, 8019, 1686, 3007, 5215, 4722, - 2267, 549, 6567, 9286, 5206, 4726, 1559, 1580, 1843, - 9051, 1093, 6549, 227, 7062, 3358, 7788, 368, 1631, - 6978, 486, 1733, 2943, 2244, 1432, 7203, 6090, 6747, - 7011, 1946, 5223, 4474, 9388, 5043, 3504, 986, 4293, - 9704, 6750, 9397, 4917, 3565, 4328, 3764, 3459, 2307, - 2729, 635, 5686, 8216, 7948, 4213, 7647, 9654, 8289, - 3877, 9908, 2629, 3758, 2379, 7870, 1661, 2524, 2025, - 1815, 1665, 3312, 6095, 9851, 161, 2621, 4565, 7285, - 2137, 5689, 9717, 1329, 6660, 9626, 5019, 6708, 5239, - 2539, 7410, 9243, 
432, 2490, 4568, 6231, 3113, 8655, - 7941, 5808, 4116, 1627, 1188, 3208, 6858, 4204, 4107, - 8587, 6451, 7251, 5391, 5116, 7261, 6410, 672, 1611, - 2152, 6577, 1898, 7871, 7055, 9067, 2195, 7972, 8151, - 3524, 2108, 1529, 903, 5082, 5551, 7367, 8808, 5241, - 1594, 4707, 2380, 9631, 7076, 8394, 60, 5765, 6660, - 7579, 5424, 5880, 2129, 1669, 1900, 4256, 3378, 8104, - 786, 7144, 1856, 8099, 6815, 7882, 423, 608, 9782, - 4637, 469, 8681, 5348, 4408, 1360, 3224, 6978, 6622, - 2157, 480, 8176, 5588, 3027, 9032, 9775, 4249, 3903, - 1436, 224, 3043, 777, 9049, 6566, 883, 450, 1264, - 5827, 1325, 7033, 7598, 679, 18, 1120, 5920, 5758, - 2185, 5454, 2940, 7503, 9812, 4562, 3927, 3436, 2601, - 8869, 7843, 2841, 3522, 6350, 1990, 4736, 9057, 1919, - 3994, 2486, 4123, 9431, 8733, 1141, 4361, 3867, 8089, - 5781, 3266, 3873, 5516, 9798, 9002, 2944, 3297, 5197, - 1504, 1823, 4464, 7507, 2669, 3882, 4906, 2310, 4718, - 234, 7610, 4257, 4365, 5806, 4335, 3504, 7354, 8689, - 7274, 7744, 172, 8368, 5331, 8399, 5274, 8133, 5361, - 9298, 1154, 1186, 3811, 4358, 2956, 1986, 9397, 8700, - 7053, 4314, 9823, 2442, 8137, 2830, 8408, 6332, 298, - 3646, 3656, 82, 1106, 1369, 2489, 4912, 3347, 7014, - 6752, 7779, 5247, 3746, 5700, 1718, 5366, 8881, 5179, - 8357, 1175, 6351, 9104, 5664, 5393, 8089, 1751, 1181, - 442, 1145, 4570, 9871, 3241, 5972, 4479, 5878, 696, - 7460, 910, 8388, 5287, 6423, 2487, 7461, 8332, 6223, - 9907, 9105, 8301, 1183, 9143, 8830, 6716, 8759, 8681, - 8760, 2734, 4492, 4751, 9627, 6030, 6438, 4871, 2463, - 4874, 1337, 8813, 300, 266, 401, 9585, 8122, 8805, - 1942, 3005, 3251, 3369, 2623, 8238, 6235, 2278, 6386, - 8368, 7653, 5795, 8078, 6633, 864, 376, 7421, 3698, - 1655, 8162, 1822, 4509, 7622, 5335, 2343, 7786, 3326, - 6814, 8651, 1850, 6620, 4412, 8162, 5623, 110, 9074, - 5711, 6790, 4627, 1735, 2586, 315, 7785, 7851, 2760, - 1940, 1744, 4300, 8283, 2876, 1795, 3647, 5205, 5974, - 4770, 3963, 9075, 9085, 565, 5510, 5205, 1690, 2947, - 1996, 9032, 9696, 3776, 3990, 1005, 1961, 5455, 7300, - 4600, 5848, 4530, 4076, 1870, 1037, 8211, 83, 1549, - 6014, 5185, 1742, 7350, 5452, 7365, 9636, 5003, 9594, - 8668, 4421, 624, 8506, 8084, 1884, 4572, 304, 7069, - 7367, 7536, 9106, 7375, 2723, 6809, 334, 4105, 6967, - 1311, 7930, 2278, 7959, 1574, 326, 4014, 7802, 1752, - 657, 502, 6011, 3203, 6400, 3973, 9079, 1712, 8626, - 5170, 7690, 8122, 9371, 2199, 9383, 7195, 969, 7515, - 5328, 5262, 2752, 9435, 4633, 4921, 9066, 7196, 6365, - 139, 3707, 9176, 4417, 2454, 5392, 7486, 8004, 7632, - 1440, 9450, 7206, 3521, 6136, 4155, 916, 3884, 644, - 8199, 257, 4918, 721, 1533, 4326, 6500, 3021, 1625, - 6549, 1839, 5719, 458, 9056, 305, 1616, 7198, 4853, - 9757]), - values=tensor([2.2760e-01, 3.5221e-02, 5.9072e-01, 2.5078e-01, - 4.1717e-01, 3.4549e-01, 4.2576e-01, 7.0564e-01, - 5.7849e-01, 9.1025e-01, 7.0210e-01, 4.3945e-02, - 6.9883e-01, 6.9216e-01, 4.5909e-01, 8.9233e-01, - 1.7268e-02, 6.5426e-01, 9.7972e-01, 2.1253e-01, - 2.7185e-01, 2.9433e-01, 1.1540e-01, 7.2390e-01, - 2.2506e-01, 9.7264e-01, 8.2542e-01, 4.6082e-01, - 8.7183e-01, 8.5724e-01, 7.1250e-01, 5.0168e-02, - 9.4167e-01, 2.6410e-01, 8.8473e-01, 7.0941e-01, - 5.2592e-01, 2.2444e-03, 6.6744e-01, 7.7300e-01, - 7.7005e-01, 8.7332e-01, 4.4405e-01, 7.1587e-01, - 1.8697e-01, 1.1902e-01, 8.8079e-01, 3.7209e-01, - 1.5485e-01, 7.5170e-02, 9.4715e-02, 6.1784e-01, - 1.0109e-01, 6.2407e-01, 7.9488e-01, 2.2843e-01, - 6.0383e-01, 7.8142e-01, 9.7064e-01, 7.2740e-01, - 5.5003e-01, 8.2418e-01, 4.8375e-01, 3.5116e-01, - 3.3782e-01, 7.0287e-01, 5.8611e-01, 8.6277e-01, - 
8.4254e-02, 6.2127e-01, 5.8344e-01, 3.2293e-01, - 7.7329e-01, 3.6031e-01, 9.6753e-01, 3.7167e-01, - 1.0712e-01, 4.6351e-01, 9.6488e-02, 2.0868e-01, - 5.6222e-01, 2.7802e-01, 6.2376e-01, 6.1054e-01, - 4.5383e-01, 3.9105e-02, 2.9168e-01, 6.4735e-02, - 8.5285e-02, 6.4931e-01, 5.5497e-01, 2.7920e-01, - 2.7865e-01, 3.1448e-02, 6.6142e-01, 1.8658e-01, - 3.0011e-01, 2.3787e-01, 6.4867e-02, 1.2835e-01, - 5.7700e-01, 2.9192e-01, 6.4012e-01, 3.5225e-01, - 4.5143e-01, 5.6227e-02, 6.0549e-01, 4.2581e-01, - 6.7602e-01, 7.4395e-01, 4.8068e-01, 5.2565e-01, - 8.9017e-01, 9.6632e-01, 9.5015e-02, 2.8433e-01, - 9.0995e-01, 9.8708e-01, 4.8744e-01, 1.1256e-01, - 3.0450e-02, 2.0815e-01, 1.9671e-01, 8.9670e-01, - 3.5518e-01, 9.4204e-01, 5.5091e-01, 3.7966e-01, - 6.3899e-01, 5.5009e-01, 2.0501e-01, 4.1548e-01, - 3.6357e-02, 2.7655e-01, 3.6680e-01, 5.4367e-01, - 5.7968e-01, 2.1066e-01, 2.3530e-01, 3.7776e-01, - 2.8517e-01, 9.0314e-01, 3.4144e-01, 5.8359e-01, - 1.7634e-01, 3.5609e-01, 1.0387e-01, 9.8228e-01, - 2.7448e-01, 9.3196e-01, 1.1081e-01, 7.4499e-01, - 3.1778e-01, 2.0672e-01, 1.7794e-02, 6.1138e-01, - 3.1989e-02, 3.6895e-01, 8.2641e-01, 5.9869e-01, - 3.5595e-01, 9.6373e-01, 3.7480e-01, 5.2143e-01, - 4.3349e-02, 1.3590e-01, 2.0842e-01, 2.1166e-01, - 3.8860e-01, 9.1333e-01, 8.6377e-01, 6.6471e-01, - 2.7729e-01, 2.3286e-01, 9.3413e-01, 9.8011e-01, - 9.0617e-01, 8.3591e-01, 5.6210e-01, 5.3489e-02, - 7.8598e-01, 3.9560e-02, 4.2489e-01, 7.4518e-01, - 5.7467e-01, 9.3889e-01, 8.6789e-01, 7.5959e-01, - 4.9204e-01, 4.2775e-01, 7.1760e-02, 2.8086e-01, - 6.7026e-02, 7.1900e-01, 2.9688e-01, 5.3084e-01, - 9.7309e-01, 9.7190e-01, 5.2828e-01, 6.9719e-01, - 5.8565e-01, 5.6568e-01, 5.9262e-01, 4.6447e-01, - 4.2431e-01, 6.8029e-01, 6.2315e-01, 6.3149e-01, - 3.5814e-01, 7.3594e-01, 7.3362e-01, 6.9685e-01, - 9.1457e-01, 3.4904e-01, 8.8544e-01, 8.7293e-01, - 3.9296e-01, 4.9813e-01, 3.9881e-01, 2.3942e-01, - 4.5849e-01, 9.8730e-01, 7.5710e-01, 9.1946e-01, - 2.4114e-01, 8.6826e-01, 9.5078e-01, 3.1663e-01, - 6.3165e-01, 8.8688e-01, 1.2552e-01, 6.0931e-01, - 4.3551e-02, 2.6901e-01, 5.2283e-01, 1.0118e-01, - 2.8103e-01, 9.6430e-01, 5.1347e-01, 7.0134e-01, - 8.7571e-01, 6.9586e-01, 4.3836e-01, 8.1650e-01, - 6.4549e-01, 9.2081e-01, 8.2708e-01, 1.4900e-02, - 5.1331e-01, 5.9988e-01, 1.1665e-01, 8.7802e-01, - 2.1991e-02, 6.0721e-01, 9.2510e-01, 8.6212e-01, - 2.3484e-01, 6.7764e-01, 1.9046e-01, 3.4118e-01, - 1.9233e-01, 3.4028e-01, 5.2586e-01, 5.1113e-02, - 9.3913e-01, 3.9639e-01, 4.1876e-01, 6.1110e-01, - 6.8628e-01, 2.4277e-01, 1.8463e-01, 4.8661e-01, - 5.4496e-01, 8.3349e-02, 1.0702e-01, 3.9832e-01, - 6.1730e-01, 6.8752e-01, 3.9926e-01, 6.8654e-01, - 4.6554e-02, 3.9080e-01, 5.3455e-01, 3.7730e-02, - 5.8768e-01, 9.8626e-01, 7.7561e-01, 8.3431e-01, - 4.8087e-01, 9.9003e-01, 4.9649e-01, 6.8913e-01, - 5.3056e-01, 7.1228e-01, 9.5001e-01, 9.0827e-01, - 5.3483e-01, 6.4157e-01, 7.8345e-02, 5.8628e-01, - 8.6508e-01, 2.5309e-01, 6.6991e-01, 5.7129e-01, - 1.2317e-01, 3.7441e-01, 9.9664e-01, 8.2565e-01, - 1.2528e-01, 2.7012e-01, 1.5033e-01, 9.9663e-01, - 4.0935e-01, 8.9345e-01, 3.6637e-01, 5.5793e-02, - 7.5246e-01, 4.2466e-01, 7.7363e-02, 4.8869e-01, - 3.8751e-01, 6.9318e-01, 8.5083e-02, 1.4567e-01, - 2.9250e-01, 5.2467e-01, 9.3045e-01, 5.3264e-01, - 1.2770e-01, 3.4043e-01, 2.2540e-01, 7.5242e-01, - 5.3542e-01, 4.6133e-01, 9.7629e-02, 3.1000e-01, - 5.3638e-01, 8.9934e-01, 6.8304e-01, 2.0806e-01, - 8.5919e-01, 3.4203e-01, 5.8597e-01, 6.2314e-01, - 3.5090e-01, 3.1243e-01, 9.2962e-01, 5.6896e-01, - 1.2483e-01, 7.1930e-01, 6.4281e-02, 2.1398e-01, - 
1.6170e-01, 9.6231e-01, 9.2507e-01, 4.7013e-06, - 7.0523e-01, 8.2283e-01, 4.5006e-01, 9.7704e-01, - 1.8690e-01, 5.7980e-01, 3.6762e-01, 8.2601e-01, - 6.5415e-01, 1.4619e-01, 4.2706e-01, 3.1835e-01, - 2.1401e-01, 7.7684e-01, 4.4234e-01, 8.3810e-01, - 7.2964e-01, 9.5477e-02, 1.6790e-01, 8.4675e-01, - 5.1339e-01, 5.7626e-01, 1.3007e-01, 2.9192e-01, - 2.5965e-01, 6.0686e-01, 2.9454e-01, 1.3320e-01, - 7.0453e-01, 5.2023e-01, 7.1022e-02, 7.0869e-01, - 6.2223e-01, 2.2574e-02, 1.4690e-01, 8.6693e-01, - 7.3473e-02, 1.5893e-01, 9.5142e-01, 9.9127e-01, - 5.7690e-01, 2.2612e-01, 5.0941e-01, 4.6335e-01, - 4.3759e-02, 8.2616e-01, 9.9737e-01, 5.8361e-01, - 2.9258e-01, 9.5502e-01, 2.7463e-01, 9.1821e-01, - 4.5758e-02, 4.4305e-01, 4.3265e-01, 5.9651e-01, - 5.5001e-01, 5.4088e-01, 9.7137e-01, 1.3754e-01, - 9.7428e-01, 6.3537e-01, 8.5881e-01, 5.3823e-01, - 1.1991e-01, 1.9536e-01, 7.9941e-01, 9.1208e-02, - 2.4127e-01, 8.1470e-01, 4.8731e-01, 2.1894e-01, - 2.6253e-01, 1.1551e-01, 4.9124e-02, 9.8311e-01, - 3.0121e-01, 3.1190e-01, 4.5535e-01, 6.2488e-01, - 6.1180e-01, 5.8929e-01, 8.9395e-01, 1.4285e-01, - 4.1279e-01, 5.7728e-01, 1.5323e-01, 1.7431e-01, - 9.2063e-01, 9.8269e-01, 4.5162e-01, 3.2896e-01, - 3.2116e-01, 6.7039e-01, 3.5667e-01, 7.9971e-01, - 4.9289e-01, 8.0621e-01, 3.6761e-01, 8.0539e-01, - 5.6533e-01, 6.2716e-01, 4.3960e-01, 2.3829e-01, - 1.0842e-01, 4.0311e-01, 6.9153e-01, 1.0539e-01, - 9.6062e-01, 2.7797e-01, 4.8334e-01, 3.4844e-01, - 5.7558e-01, 1.7801e-01, 4.7933e-01, 5.9983e-01, - 8.0374e-01, 4.8049e-01, 4.0849e-01, 1.0845e-01, - 8.0544e-01, 7.6767e-01, 5.7822e-01, 9.0690e-01, - 8.5136e-01, 7.1502e-01, 6.3645e-01, 8.0806e-01, - 6.1472e-01, 9.2440e-01, 6.1268e-01, 6.9137e-01, - 4.3311e-01, 7.2099e-01, 1.4897e-01, 9.2718e-01, - 9.8949e-01, 2.5598e-01, 4.5546e-01, 2.8312e-01, - 9.8466e-01, 7.4199e-01, 6.7438e-01, 5.9843e-01, - 8.3839e-01, 9.5652e-01, 9.9919e-01, 7.9694e-01, - 6.0763e-01, 2.8055e-01, 9.3936e-01, 7.9674e-02, - 2.7143e-01, 8.7185e-01, 4.2139e-01, 8.7106e-01, - 9.5567e-01, 7.3814e-01, 8.8050e-01, 2.4368e-01, - 3.5106e-02, 9.5191e-02, 7.8457e-01, 8.2905e-01, - 5.5522e-01, 1.4311e-01, 6.3803e-01, 6.3374e-01, - 3.4075e-01, 9.6717e-01, 5.2403e-01, 9.9450e-01, - 3.6817e-01, 6.2288e-01, 6.5032e-01, 7.4284e-01, - 3.2219e-01, 9.1163e-01, 1.8294e-01, 7.4995e-01, - 9.3000e-01, 1.4936e-01, 9.2124e-01, 1.9468e-01, - 2.6204e-01, 3.9770e-01, 3.5536e-01, 9.6075e-01, - 6.5686e-01, 5.3686e-01, 3.6731e-01, 9.0661e-02, - 5.8419e-01, 1.9639e-02, 6.2822e-01, 6.9317e-01, - 3.0485e-01, 6.4889e-01, 9.5125e-01, 5.7206e-01, - 6.2057e-01, 2.1344e-01, 3.9874e-01, 6.3107e-01, - 1.3827e-01, 3.4122e-01, 2.2950e-02, 9.8721e-01, - 4.8321e-01, 8.6787e-01, 6.2517e-01, 5.1338e-01, - 8.8793e-01, 6.6596e-01, 6.0825e-01, 9.4586e-01, - 6.2170e-01, 7.0521e-01, 1.7779e-01, 8.1480e-01, - 7.3632e-01, 7.0682e-01, 5.9327e-02, 3.1923e-01, - 7.6294e-01, 8.6778e-01, 7.4956e-01, 3.3441e-01, - 5.7045e-02, 5.6280e-01, 4.9676e-01, 4.1326e-01, - 5.9257e-01, 9.5921e-01, 1.2747e-01, 8.1687e-01, - 5.1016e-01, 3.5676e-01, 5.2429e-01, 9.7885e-01, - 1.5599e-01, 9.5171e-01, 3.7502e-03, 3.6002e-01, - 6.0546e-01, 9.9878e-01, 9.1985e-01, 5.7490e-01, - 4.5907e-01, 3.5069e-02, 9.8653e-01, 9.7455e-01, - 6.2178e-01, 7.2946e-02, 2.4594e-01, 1.5277e-01, - 7.8208e-01, 3.1965e-01, 9.0671e-01, 7.4361e-01, - 1.5947e-01, 7.7474e-01, 2.8499e-01, 5.6960e-01, - 5.5471e-01, 1.7479e-01, 5.0218e-01, 9.8562e-01, - 6.8947e-01, 3.4734e-01, 2.5316e-01, 4.1212e-01, - 8.9755e-02, 1.5165e-01, 7.4017e-01, 1.2175e-01, - 1.8502e-01, 7.8411e-01, 5.7160e-01, 9.2566e-01, - 
2.0186e-01, 2.8193e-01, 6.3665e-01, 4.8082e-01, - 5.8762e-01, 7.5002e-01, 7.5287e-01, 2.1272e-01, - 7.0649e-01, 4.6356e-01, 7.9673e-01, 6.7600e-02, - 8.0324e-01, 7.4938e-01, 1.5113e-01, 9.3034e-03, - 1.8397e-01, 4.1201e-01, 9.3876e-01, 5.1078e-01, - 9.6300e-01, 7.4361e-01, 5.2908e-01, 5.6847e-01, - 9.4865e-01, 8.6867e-01, 9.8734e-01, 8.2798e-02, - 2.7453e-01, 8.7811e-01, 8.1549e-02, 7.5651e-01, - 8.8527e-01, 2.5916e-01, 9.3804e-01, 8.8769e-02, - 6.5843e-01, 7.1280e-01, 7.4733e-01, 8.4278e-01, - 9.4306e-02, 3.4855e-01, 3.6923e-01, 5.2801e-01, - 3.5479e-01, 7.6427e-01, 8.6350e-01, 8.9582e-01, - 5.0220e-01, 5.0060e-01, 5.3680e-01, 1.1208e-01, - 5.7576e-01, 7.6430e-01, 5.7555e-01, 5.8240e-01, - 2.2192e-01, 1.9720e-01, 1.9796e-01, 1.7826e-01, - 5.1031e-01, 6.0345e-01, 6.0780e-01, 3.3075e-01, - 7.5456e-01, 1.5925e-01, 5.9667e-01, 6.0470e-01, - 3.3668e-01, 9.2265e-02, 3.4059e-01, 3.0964e-01, - 7.2838e-01, 1.1446e-01, 5.1217e-01, 1.8042e-01, - 5.6532e-01, 3.9903e-01, 9.1478e-01, 5.5361e-01, - 6.9311e-01, 3.5533e-01, 6.0366e-01, 1.6747e-01, - 8.2851e-01, 3.1336e-01, 2.6558e-01, 6.2869e-02, - 8.0341e-01, 2.6676e-01, 1.2992e-01, 6.6329e-01, - 4.5308e-01, 1.2271e-02, 9.0200e-01, 8.1114e-01, - 1.9246e-01, 7.8270e-01, 9.7264e-01, 7.6296e-01, - 3.9219e-01, 1.8942e-01, 7.9924e-02, 3.6218e-01, - 8.3925e-01, 9.4399e-01, 3.8968e-02, 1.4799e-01, - 4.6923e-01, 5.6256e-01, 3.1602e-01, 1.4438e-01, - 6.8883e-01, 4.7218e-01, 9.9266e-01, 4.9855e-01, - 8.3918e-01, 7.7844e-01, 3.5122e-01, 9.9153e-02, - 8.1764e-01, 5.4243e-02, 3.4209e-01, 1.8529e-02, - 9.1887e-01, 4.4449e-01, 7.7983e-01, 1.7256e-01, - 9.9816e-01, 6.0125e-01, 2.6970e-02, 5.0364e-01, - 6.7695e-01, 9.0214e-01, 4.7416e-01, 5.0448e-01, - 2.7249e-01, 6.9798e-01, 7.8632e-01, 7.4111e-01, - 7.8624e-01, 4.5545e-01, 6.8036e-01, 6.2030e-01, - 1.4065e-02, 2.2507e-01, 3.0552e-01, 3.2135e-01, - 7.0066e-01, 2.0238e-01, 6.8231e-01, 2.5313e-01, - 3.5273e-01, 4.7153e-02, 6.4807e-01, 1.1778e-01, - 9.4661e-02, 6.0636e-01, 2.5697e-01, 1.3635e-01, - 8.6819e-01, 6.6326e-01, 2.8635e-01, 4.0010e-01, - 3.3971e-01, 9.5972e-01, 6.2596e-01, 9.9963e-01, - 9.7307e-01, 1.6734e-01, 5.3424e-02, 5.5741e-01, - 4.0894e-01, 8.8019e-01, 8.3831e-01, 4.6774e-02, - 7.5041e-01, 2.7236e-01, 2.2185e-01, 2.6223e-01, - 8.4859e-01, 9.7958e-01, 9.1309e-01, 6.6954e-01, - 7.2329e-01, 1.1151e-01, 8.3584e-02, 5.7305e-01, - 1.0986e-01, 5.4961e-01, 2.6916e-01, 8.8619e-01, - 5.4081e-01, 9.2680e-01, 1.5231e-01, 5.0414e-01, - 5.9220e-01, 1.4815e-01, 7.2665e-01, 9.2504e-01, - 5.2573e-01, 3.2436e-02, 9.4962e-02, 6.4277e-01, - 9.9252e-01, 6.9223e-01, 3.8798e-01, 2.0939e-01, - 8.2775e-01, 7.6169e-01, 8.5089e-01, 2.7587e-01, - 2.0360e-01, 1.6886e-01, 8.7825e-01, 1.6879e-01, - 3.1510e-03, 4.6880e-01, 8.0777e-01, 4.4160e-01, - 8.4465e-01, 5.9578e-02, 7.0169e-01, 4.3114e-01, - 7.1841e-01, 5.4612e-01, 2.6982e-01, 4.4678e-01, - 3.1014e-01, 6.8810e-01, 9.7878e-01, 3.4804e-01, - 3.3573e-01, 4.7867e-01, 5.0425e-01, 2.4231e-01, - 5.2775e-01, 3.3611e-01, 2.6111e-01, 7.3952e-01, - 8.4297e-01, 9.0409e-01, 3.6516e-01, 2.2077e-01, - 6.6859e-01, 4.0213e-01, 5.2519e-01, 8.2468e-01, - 7.6980e-01, 9.4835e-02, 1.7087e-01, 2.6707e-01, - 3.4409e-01, 9.4803e-01, 4.8875e-01, 8.2901e-01, - 5.0371e-01, 9.1676e-01, 8.4760e-01, 3.7325e-01, - 9.1492e-01, 2.7756e-01, 7.8704e-01, 9.6423e-01, - 6.6825e-01, 5.5767e-01, 1.6778e-01, 2.1423e-01, - 7.6494e-01, 4.1423e-01, 5.8547e-01, 4.9735e-01, - 5.9344e-01, 7.4411e-01, 1.6778e-01, 4.3753e-01, - 1.2092e-01, 5.8899e-01, 4.4759e-01, 9.2795e-01, - 7.8552e-01, 3.6512e-01, 2.7770e-01, 9.7002e-01, - 
8.5269e-01, 1.7249e-01, 4.4557e-02, 7.9337e-01, - 3.9258e-01, 1.6638e-01, 8.7393e-02, 4.2894e-01, - 9.7344e-01, 8.6539e-01, 3.9129e-01, 9.8888e-01, - 8.5705e-01, 5.2376e-01, 9.1048e-01, 7.8188e-01, - 8.3763e-02, 4.9184e-01, 6.4964e-01, 1.8151e-01, - 7.0035e-01, 5.1668e-01, 3.6723e-01, 9.4863e-01, - 9.2009e-01, 2.9091e-01, 6.0006e-01, 1.4881e-01, - 8.7263e-01, 5.5714e-01, 8.9518e-01, 1.9135e-01, - 2.8367e-01, 6.0469e-01, 6.3090e-02, 4.5934e-01, - 8.5825e-01, 6.9705e-01, 2.5107e-01, 4.2451e-01, - 2.0143e-01, 8.4879e-01, 9.2691e-01, 4.7860e-01, - 5.7185e-01, 6.4853e-01, 1.2931e-01, 1.5035e-01, - 3.4904e-02, 6.9460e-01, 5.3142e-01, 4.6962e-01, - 9.9578e-01, 8.9967e-01, 9.7289e-01, 8.0918e-01, - 2.6973e-01, 4.3641e-01, 4.0857e-01, 1.2828e-01, - 3.9004e-01, 6.2152e-01, 1.4185e-02, 9.0961e-01, - 4.8715e-01, 8.0070e-02, 1.6869e-01, 8.1943e-01, - 2.0505e-02, 9.3012e-01, 3.0873e-02, 3.6259e-01, - 3.9670e-01, 6.9117e-01, 6.7714e-01, 5.8222e-01, - 5.7660e-01, 9.3747e-01, 5.4393e-01, 3.3248e-01]), + col_indices=tensor([9138, 88, 9925, 6110, 2605, 340, 5956, 3778, 8300, + 2766, 2415, 356, 3329, 6854, 5455, 793, 5522, 4476, + 5241, 1294, 7569, 8127, 9283, 6072, 8826, 6207, 2260, + 3124, 306, 7010, 7771, 4682, 3624, 7319, 82, 8448, + 4521, 4810, 3870, 1662, 7314, 1288, 1304, 5554, 9567, + 9579, 9692, 6033, 1049, 3607, 5801, 3226, 2607, 6224, + 5048, 5599, 9933, 3657, 199, 3603, 6580, 5494, 6052, + 8972, 727, 7003, 7712, 1465, 3313, 5370, 8225, 506, + 2132, 9159, 9298, 7883, 9321, 6558, 6192, 8563, 3021, + 5097, 6402, 3516, 4725, 3323, 5925, 7736, 934, 2152, + 415, 9779, 2459, 2032, 1017, 8577, 4353, 2307, 3707, + 7696, 2489, 6071, 6443, 6704, 9904, 2685, 4669, 5544, + 3959, 2983, 9040, 2033, 7320, 2161, 4481, 6668, 6026, + 7153, 9823, 4697, 1527, 5589, 6975, 8837, 3195, 644, + 7715, 2983, 8912, 5117, 1412, 537, 9828, 4072, 6000, + 2149, 554, 4346, 8996, 6426, 1022, 2966, 9384, 8537, + 9416, 5887, 6269, 5258, 4819, 9322, 4229, 6389, 6260, + 1541, 4070, 4003, 6524, 298, 6252, 7806, 4466, 5030, + 3423, 1169, 4895, 2782, 3384, 5163, 1707, 2179, 8192, + 4856, 4416, 8541, 1726, 6556, 5525, 641, 7685, 381, + 2461, 9802, 6078, 9726, 9306, 2118, 8753, 8575, 72, + 4711, 4403, 970, 5051, 2468, 8252, 3531, 859, 5120, + 2455, 4223, 8979, 470, 4221, 2490, 6986, 4019, 7557, + 9215, 798, 8794, 2097, 6389, 9800, 6503, 7889, 2311, + 3799, 8462, 8357, 7718, 7346, 188, 621, 2023, 2539, + 5495, 5438, 2018, 3425, 902, 3405, 3334, 1814, 7524, + 3720, 174, 5885, 5109, 7606, 980, 7935, 5490, 6670, + 4801, 67, 4661, 4738, 8527, 7468, 7340, 1660, 3760, + 1635, 4034, 8280, 4301, 5427, 4797, 8120, 1904, 3428, + 1341, 5108, 1943, 1285, 6206, 6232, 43, 320, 4474, + 1535, 6118, 6953, 9965, 80, 271, 3088, 5551, 6872, + 6949, 7239, 301, 1456, 8199, 8711, 3036, 5996, 958, + 2825, 1772, 1957, 5020, 487, 6842, 4166, 2955, 6492, + 17, 9760, 9135, 1765, 8481, 7530, 9891, 8597, 8556, + 5786, 1228, 1351, 1248, 8804, 1017, 7515, 2278, 5157, + 7588, 2633, 1729, 9878, 9927, 5939, 2378, 2550, 7075, + 4737, 7187, 5641, 3536, 3422, 4800, 9633, 813, 4145, + 9491, 5345, 6430, 9042, 887, 7605, 8283, 655, 5757, + 2626, 8734, 7264, 9682, 1062, 2309, 539, 2604, 5173, + 3568, 2970, 3578, 9495, 7355, 2235, 6445, 8654, 6372, + 608, 5409, 3440, 9396, 4059, 3336, 8176, 4379, 8456, + 1419, 7167, 6685, 2068, 4062, 4242, 7347, 4971, 6977, + 7281, 1182, 2411, 2287, 8877, 3821, 7978, 189, 6921, + 1884, 7608, 9661, 3998, 1182, 9277, 1249, 6224, 968, + 6355, 5060, 7903, 1166, 7823, 1212, 2431, 8400, 1271, + 9520, 6242, 9971, 4349, 938, 8969, 4270, 822, 5741, + 9394, 
8293, 6301, 6981, 9908, 8255, 102, 7831, 5694, + 9414, 2284, 2590, 7248, 1466, 9859, 2377, 2143, 4535, + 4396, 5994, 2705, 2889, 5015, 3580, 8953, 2889, 4447, + 8786, 4942, 8218, 3896, 303, 2525, 247, 5833, 8258, + 2868, 2360, 9399, 9670, 3993, 9504, 1915, 919, 9928, + 1803, 4809, 1358, 7489, 9688, 7456, 4687, 7541, 3474, + 9779, 3007, 1683, 8615, 915, 2781, 8504, 9018, 3643, + 2290, 4220, 5519, 536, 6768, 24, 1161, 7604, 5855, + 39, 1761, 9899, 6103, 58, 6751, 2360, 8656, 3618, + 3212, 9556, 6123, 4185, 4460, 1357, 6925, 2551, 6573, + 4677, 154, 3766, 9863, 6079, 4532, 3616, 3110, 8315, + 4801, 2108, 8553, 3164, 9378, 6464, 595, 84, 474, + 2425, 9930, 399, 7869, 9698, 604, 5368, 9064, 4589, + 7633, 6312, 2970, 6489, 354, 773, 4003, 9913, 275, + 2849, 63, 7808, 7372, 9770, 9513, 4525, 4580, 3235, + 8544, 3833, 1302, 2568, 1970, 3174, 5901, 9943, 9832, + 235, 6570, 2725, 9042, 7813, 8730, 8969, 5958, 8913, + 5966, 6304, 8362, 9055, 4424, 4142, 5279, 7030, 5904, + 844, 8697, 734, 4613, 2966, 3356, 8238, 8366, 436, + 65, 984, 2844, 1847, 8328, 7443, 1253, 3985, 4330, + 9195, 2307, 2527, 1543, 2864, 5197, 4679, 328, 1120, + 6619, 4598, 1932, 6006, 6685, 3164, 9876, 7906, 8431, + 5737, 7724, 811, 2068, 8401, 4190, 4778, 9707, 3849, + 9387, 3280, 6050, 9898, 7004, 705, 6207, 6146, 9110, + 4514, 1880, 5102, 2440, 9719, 163, 9086, 9370, 9656, + 1567, 6024, 5289, 2489, 6082, 7211, 3889, 6975, 5002, + 6557, 375, 5863, 3640, 1925, 9842, 3159, 4914, 2750, + 7209, 9838, 4360, 3173, 9524, 2237, 1815, 5953, 43, + 3649, 9095, 774, 2795, 7, 6114, 6654, 440, 2827, + 761, 1411, 6825, 3090, 8856, 8355, 4804, 7164, 4684, + 4241, 810, 7302, 2782, 8059, 9235, 2523, 7832, 6608, + 9632, 2760, 2828, 9495, 9834, 6545, 257, 6386, 2900, + 9289, 3279, 2210, 7391, 2679, 4619, 8714, 550, 7613, + 4604, 4404, 6853, 3981, 6965, 7077, 1130, 5879, 2687, + 5429, 2819, 9333, 6926, 8259, 9706, 6593, 5588, 528, + 7685, 652, 9498, 3661, 4957, 7462, 9469, 2874, 8044, + 4397, 3011, 727, 1559, 7390, 2900, 5580, 8066, 9798, + 6182, 1555, 3257, 60, 8190, 8254, 597, 1516, 5695, + 8535, 1465, 9400, 7666, 1975, 6884, 287, 629, 1437, + 8969, 3725, 9466, 2595, 4269, 7965, 2948, 5212, 5251, + 4253, 7232, 3122, 9553, 9702, 6587, 9629, 1611, 4119, + 5654, 2677, 740, 2227, 9630, 4116, 4617, 8149, 2722, + 7825, 4599, 9413, 8923, 8264, 3267, 1948, 1366, 4183, + 1125, 434, 4634, 1781, 9113, 7808, 1594, 9332, 8888, + 8244, 8529, 8839, 5656, 2946, 9157, 5742, 2795, 5890, + 7056, 8353, 1065, 6737, 4715, 5752, 5285, 9265, 4485, + 357, 6187, 6936, 4388, 1372, 3404, 3737, 609, 1220, + 4090, 4155, 6031, 8841, 4568, 3455, 6307, 7126, 8160, + 4114, 1931, 6166, 9773, 4630, 1375, 8551, 7521, 5487, + 7930, 7327, 672, 6993, 5913, 2567, 3784, 5855, 526, + 9200, 9781, 1540, 7841, 4155, 4047, 9676, 1433, 3617, + 6628, 3109, 5877, 9679, 520, 5329, 6105, 9669, 1669, + 8437, 1775, 5363, 470, 404, 4948, 3208, 729, 6222, + 4480, 7058, 8542, 230, 5224, 7845, 4217, 6763, 4420, + 9187, 9965, 8691, 9222, 1176, 7124, 8517, 5433, 1730, + 2115, 9671, 8642, 6271, 5061, 1542, 4097, 866, 2675, + 6711, 2453, 1324, 7866, 7526, 8903, 2474, 9461, 4304, + 3845, 789, 4233, 8884, 2141, 6175, 6927, 3155, 5810, + 1369, 2205, 423, 7115, 506, 1652, 9547, 4630, 1558, + 1723, 4961, 397, 6480, 1563, 6211, 9295, 541, 6686, + 695, 4697, 6824, 7280, 5991, 8302, 4418, 6999, 6926, + 5960, 8158, 4528, 2341, 8197, 9543, 4560, 4949, 8473, + 3682, 6495, 6706, 5826, 2087, 2357, 4047, 2394, 3902, + 8306, 1129, 516, 8348, 4846, 9528, 9137, 6552, 3081, + 3850, 8060, 8811, 1105, 2938, 5556, 6295, 7391, 
7753, + 5601]), + values=tensor([3.0546e-01, 5.0292e-01, 2.3257e-01, 7.6163e-01, + 1.3214e-01, 6.9043e-01, 1.2924e-01, 2.2895e-01, + 7.5991e-01, 4.5296e-01, 7.8851e-02, 8.0757e-01, + 8.4762e-01, 5.7022e-01, 9.7492e-01, 3.7199e-01, + 4.7085e-01, 3.6444e-01, 1.4732e-01, 8.5390e-01, + 7.6210e-01, 3.7304e-01, 6.6372e-01, 8.3452e-02, + 9.7907e-02, 7.9441e-01, 5.0442e-01, 5.3476e-01, + 2.5460e-01, 2.0463e-01, 5.2276e-02, 2.3757e-01, + 9.9098e-01, 2.4339e-02, 2.6705e-01, 2.7554e-01, + 5.2617e-01, 7.6818e-02, 4.1524e-01, 2.2617e-01, + 6.9888e-01, 5.1982e-01, 3.4986e-02, 8.2621e-01, + 6.4897e-01, 3.2938e-01, 6.1865e-01, 4.6707e-01, + 4.8730e-01, 5.9909e-01, 5.1563e-01, 6.1362e-01, + 9.9062e-01, 9.2120e-02, 1.7623e-01, 7.1483e-01, + 8.5852e-01, 4.3511e-01, 8.2575e-02, 3.5430e-01, + 3.9193e-01, 6.7349e-01, 6.9654e-01, 2.4910e-01, + 9.7526e-01, 9.2904e-02, 1.5211e-01, 2.2833e-01, + 2.7533e-03, 4.0291e-01, 6.8445e-01, 7.3628e-03, + 4.2183e-01, 7.4697e-01, 6.5569e-01, 8.1926e-01, + 2.9564e-01, 4.0009e-01, 6.3533e-01, 9.1378e-01, + 4.9246e-01, 1.1889e-01, 6.5282e-01, 7.5628e-01, + 3.3088e-01, 1.0018e-01, 5.2631e-01, 2.5813e-01, + 4.9862e-01, 2.0196e-02, 6.0420e-01, 9.1465e-01, + 3.0075e-01, 7.8392e-01, 8.4779e-01, 8.9399e-01, + 7.2792e-01, 5.9261e-02, 8.3345e-01, 4.8578e-01, + 5.5253e-01, 7.6723e-01, 9.4488e-01, 1.7353e-01, + 5.9492e-01, 9.6929e-01, 1.7141e-01, 4.1890e-01, + 5.2798e-01, 1.3193e-01, 9.9533e-01, 6.6182e-01, + 2.5471e-01, 6.5305e-01, 6.2762e-02, 8.6707e-01, + 7.1419e-01, 1.2133e-01, 4.3177e-01, 9.3699e-01, + 1.8417e-03, 7.5425e-01, 4.1020e-02, 9.4920e-02, + 1.3473e-02, 8.7261e-01, 7.1847e-01, 5.0429e-01, + 5.8822e-01, 6.9089e-01, 8.3394e-01, 4.9615e-01, + 6.0785e-01, 9.7205e-01, 8.1361e-01, 6.2625e-01, + 6.4523e-01, 8.2243e-01, 9.4970e-01, 2.6450e-01, + 3.8783e-01, 3.3954e-01, 2.2550e-01, 4.6048e-01, + 4.6912e-01, 6.6507e-02, 9.7823e-01, 9.6171e-01, + 3.6260e-01, 5.2658e-01, 5.2584e-01, 1.1201e-01, + 2.6101e-01, 7.8232e-01, 2.1812e-01, 9.1694e-01, + 4.0002e-01, 4.4305e-01, 9.7012e-01, 4.4321e-01, + 9.4755e-01, 7.2948e-01, 2.5811e-02, 3.3866e-01, + 4.2151e-01, 9.6390e-01, 9.4710e-01, 6.6181e-01, + 5.4910e-01, 4.8388e-01, 6.7655e-01, 8.2547e-01, + 4.2407e-01, 7.9124e-01, 2.9745e-01, 9.9058e-01, + 5.4692e-01, 2.1420e-01, 4.3865e-01, 4.6584e-01, + 6.6910e-01, 5.7028e-01, 9.4804e-01, 8.6551e-01, + 3.7726e-01, 2.7189e-01, 1.1550e-01, 8.0677e-01, + 6.0652e-01, 8.8394e-01, 7.8356e-01, 8.6341e-01, + 3.4722e-01, 8.4132e-01, 3.7332e-02, 1.0818e-01, + 7.4932e-01, 7.6341e-01, 5.0551e-01, 7.5321e-03, + 5.2469e-01, 3.6502e-01, 7.8288e-01, 4.7653e-01, + 4.6680e-01, 2.0783e-01, 9.7493e-01, 7.0423e-01, + 5.9691e-01, 3.5689e-01, 5.3460e-01, 4.1212e-01, + 5.2642e-01, 3.8752e-01, 7.1660e-01, 5.4532e-01, + 8.8719e-01, 1.8120e-01, 9.6824e-03, 7.5474e-01, + 1.7381e-01, 3.8886e-01, 1.4673e-01, 6.2935e-01, + 1.7229e-01, 9.5232e-01, 1.8018e-01, 8.2887e-01, + 6.1846e-01, 2.1702e-01, 9.0999e-01, 3.9455e-01, + 7.9801e-01, 9.7367e-01, 7.8526e-01, 4.4372e-01, + 2.2059e-01, 3.7747e-01, 5.0166e-01, 7.8276e-01, + 6.6970e-01, 6.5626e-01, 8.3801e-01, 1.5675e-01, + 1.1548e-01, 5.2576e-01, 7.5340e-01, 6.2927e-01, + 9.3639e-01, 5.7182e-01, 5.8135e-01, 6.3740e-01, + 1.5886e-01, 6.4819e-02, 8.5413e-01, 7.7295e-01, + 8.5744e-01, 4.5221e-01, 6.9424e-01, 1.7890e-02, + 6.1964e-01, 9.0600e-01, 2.3291e-01, 3.7068e-01, + 2.5649e-01, 2.3410e-01, 8.4699e-01, 8.1302e-01, + 8.4736e-01, 1.6048e-01, 6.7759e-01, 7.6442e-01, + 8.6527e-01, 9.7970e-01, 3.7073e-01, 3.3940e-01, + 7.6938e-01, 4.9124e-01, 8.5065e-01, 5.1735e-01, + 5.2195e-01, 
+                      [... several hundred random values elided ...,
+                      4.6489e-01, 3.7186e-01, 4.7115e-01, 3.1902e-01]),
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.9318, 0.2708, 0.1659, ..., 0.9519, 0.7638, 0.9831])
+tensor([0.0435, 0.9300, 0.8297, ..., 0.3222, 0.7823, 0.3267])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -375,378 +375,378 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 0.05549430847167969 seconds
+Time: 0.05496048927307129 seconds

-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '189208', '-ss', '10000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.102759838104248}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '191046', '-ss', '10000', '-sd', '1e-05', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.995845317840576}
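For orientation, each synthetic run recorded here is a timed loop of sparse matrix-vector products over a random CSR matrix of the requested size and density. spmv.py itself is not part of this diff, so the following is a hedged reconstruction from its printed output; only the to_sparse_csr().type(torch.float32) conversion (spmv.py:75, quoted in the warnings) is verbatim, and run_synthetic_spmv is a hypothetical name.

import time
import torch

def run_synthetic_spmv(size=10000, density=1e-05, iterations=1000):
    # Reconstruction under stated assumptions, not the actual spmv.py.
    nnz = int(size * size * density)                 # 1000 nonzeros for these runs
    indices = torch.randint(0, size, (2, nnz))       # random (row, col) coordinates
    values = torch.rand(nnz)
    matrix = torch.sparse_coo_tensor(indices, values, (size, size))
    matrix = matrix.to_sparse_csr().type(torch.float32)  # spmv.py:75, per the warning
    vector = torch.rand(size)
    start = time.time()
    for _ in range(iterations):
        matrix.mv(vector)                            # the timed SpMV kernel
    elapsed = time.time() - start
    print(f"Time: {elapsed} seconds")                # the "Time:" / TIME_S lines above
    return elapsed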
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([ 669, 3273, 5792,  ... ~1000 indices elided ...,  840,  181]),
-       values=tensor([6.2729e-01, 7.3269e-01,  ... ~1000 random values elided ..., 6.1550e-01]),
+tensor(crow_indices=tensor([   0,    1,    2,  ..., 1000, 1000, 1000]),
+       col_indices=tensor([7403,  728, 1285,  ... ~1000 indices elided ..., 8870, 2375]),
+       values=tensor([2.8230e-01, 5.3787e-01,  ... ~1000 random values elided ..., 6.7181e-01]),
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.4272, 0.4478, 0.5565, ..., 0.4220, 0.4867, 0.3940])
+tensor([0.7926, 0.8036, 0.9264, ..., 0.8315, 0.2335, 0.7753])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -754,378 +754,378 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 7.102759838104248 seconds
+Time: 6.995845317840576 seconds

-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '279705', '-ss', '10000', '-sd', '1e-05', '-c', '16']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.192691802978516}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '286739', '-ss', '10000', '-sd', '1e-05', '-c', '16']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.441989183425903}
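The paired runs in each hunk follow a calibration pattern: a trial run measures TIME_S at one iteration count, then the benchmark is re-run with the count rescaled so the timed loop lands near a fixed budget. A target of roughly 10.5 seconds reproduces the counts on both sides of this hunk; the exact constant is an inference from these logs, not something they state, and rescale_iterations is a hypothetical helper.

def rescale_iterations(trial_iters, trial_time_s, target_s=10.5):
    # Scale the trial iteration count toward the assumed time budget.
    return int(trial_iters * target_s / trial_time_s)

rescale_iterations(189208, 7.102759838104248)  # -> 279705, the old side's second run
rescale_iterations(191046, 6.995845317840576)  # -> 286739, the new side's second run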
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    0,  ...,  999,  999, 1000]),
-       col_indices=tensor([8952, 4000, 8166,  ... ~1000 indices elided ..., 5541, 9055]),
-       values=tensor([6.9021e-01, 2.6494e-01,  ... ~1000 random values elided ..., 6.1207e-01]),
+tensor(crow_indices=tensor([   0,    0,    0,  ...,  999, 1000, 1000]),
+       col_indices=tensor([6994, 9622, 3726,  ... ~1000 indices elided ..., 9121, 9705,
3846, 4311, 7818, 2418, 5744, 70, 755, + 8481, 65, 9496, 8688, 8913, 4086, 4497, 9328, 649, + 7040, 1646, 7888, 6578, 8377, 9319, 9462, 8922, 351, + 4888, 4474, 5081, 3249, 9700, 1729, 5652, 3555, 8733, + 3993, 2648, 2157, 3316, 7194, 4864, 2759, 5666, 9313, + 646, 1058, 9286, 107, 8347, 4296, 5597, 1128, 2987, + 8269, 5363, 8830, 1937, 8083, 6124, 9818, 4872, 1172, + 1916, 9823, 8685, 968, 7144, 4173, 2638, 7667, 2255, + 5893, 145, 2804, 9830, 3730, 2378, 5965, 8443, 924, + 7520, 4001, 8030, 3655, 8642, 637, 4875, 9776, 3422, + 3864, 669, 3034, 7460, 9790, 914, 5157, 7812, 6653, + 5620, 1888, 8250, 4575, 2821, 7519, 5789, 7492, 3910, + 9516, 2254, 109, 3644, 4900, 8347, 1943, 4680, 1973, + 8435, 1113, 456, 1927, 7879, 8357, 9307, 9711, 2787, + 3889, 7117, 5810, 4599, 3798, 3588, 9982, 2090, 9372, + 5907, 2625, 4618, 1365, 7855, 3966, 6642, 2806, 3282, + 1538, 1603, 9281, 6130, 9122, 5424, 4039, 1685, 7585, + 463, 1054, 377, 4412, 670, 3953, 829, 8297, 5284, + 8072, 8388, 5978, 2905, 5528, 8606, 9760, 3824, 3381, + 3569, 8800, 1093, 8598, 5326, 5098, 3325, 3476, 3574, + 5831, 6739, 3709, 3431, 8854, 5412, 1395, 8493, 8413, + 723, 7482, 7437, 8192, 4561, 3563, 7930, 3095, 1395, + 1702, 4680, 4164, 7646, 3073, 2135, 8064, 9233, 8734, + 8891, 1412, 4297, 327, 9444, 6261, 4124, 7080, 3150, + 499, 7634, 5504, 2709, 2323, 6259, 7701, 1590, 782, + 9214, 1434, 7842, 7154, 3406, 6314, 2490, 1995, 2869, + 9300, 7244, 2058, 4782, 9191, 7843, 9366, 3161, 5867, + 7670, 79, 8654, 4679, 6862, 6457, 3396, 3040, 8585, + 3782, 1459, 9368, 4901, 2433, 6705, 8973, 6929, 2667, + 7169, 1587, 5204, 8532, 6578, 9598, 8213, 9081, 6214, + 7437, 7012, 2856, 7632, 7138, 445, 7701, 4052, 6739, + 3480, 3311, 3082, 7805, 6404, 5363, 2870, 9509, 6167, + 7565, 8554, 8204, 832, 9129, 7469, 2782, 5167, 5838, + 3078, 7487, 3680, 6742, 2359, 4016, 7521, 9001, 6405, + 684, 4253, 806, 2076, 1259, 8434, 8503, 8586, 7923, + 4800, 1701, 4338, 6559, 6339, 6352, 1277, 9721, 8278, + 3394, 1222, 9331, 2813, 1759, 1393, 329, 3229, 5728, + 8721, 5742, 6894, 4045, 4303, 9790, 1095, 8299, 5200, + 5067, 4411, 7646, 150, 1925, 981, 1650, 729, 4988, + 39, 6418, 4585, 6285, 3017, 2737, 9097, 1945, 3287, + 5687, 1268, 2568, 5063, 1464, 1353, 8458, 2802, 7424, + 7900, 573, 4279, 1282, 9304, 8048, 5196, 4207, 1316, + 2021, 4121, 7278, 4538, 1620, 3410, 1337, 26, 1524, + 7522, 8145, 4092, 6346, 2473, 5819, 1406, 7284, 7069, + 8477, 7361, 3903, 7212, 8344, 4533, 1123, 5383, 6628, + 5346, 4870, 8614, 6605, 5113, 8241, 2239, 2169, 4658, + 3492, 2436, 6670, 5784, 3502, 9896, 2954, 1899, 1155, + 4148, 2121, 2395, 1045, 4965, 9274, 9110, 6156, 801, + 2656, 5820, 6227, 6325, 135, 209, 4673, 9324, 9454, + 260, 8365, 2431, 2800, 6755, 2430, 7387, 1961, 7463, + 9402, 3000, 2768, 757, 5088, 1602, 9085, 3251, 3315, + 4349, 8652, 5989, 3177, 1166, 3583, 3161, 7236, 4641, + 1623, 3315, 5326, 8408, 4234, 2598, 7670, 1383, 3904, + 372, 9975, 9339, 5246, 7358, 228, 845, 6253, 9078, + 8969, 941, 5589, 812, 3206, 9576, 7426, 9564, 250, + 5639, 2420, 4119, 3196, 3078, 9183, 2554, 867, 5100, + 1465, 9987, 2915, 5151, 9058, 273, 5338, 4182, 802, + 5524, 4895, 3055, 3478, 4538, 8413, 1383, 9768, 9291, + 7392, 8171, 2516, 6216, 7200, 1324, 9549, 2831, 6274, + 7046, 7793, 8156, 8922, 1536, 4903, 8438, 7980, 8795, + 3692, 1865, 2467, 2873, 1023, 8165, 4606, 9922, 7937, + 1789, 1830, 8242, 6979, 7067, 360, 8686, 4259, 417, + 1071, 8219, 513, 1913, 750, 2857, 4490, 9807, 4091, + 2519, 3644, 392, 5240, 341, 5422, 1252, 3012, 4481, + 2472, 8199, 5103, 2599, 3525, 5909, 78, 
2754, 5345, + 3013, 6272, 5946, 3907, 4155, 8832, 5606, 7682, 1172, + 192, 7276, 8882, 698, 7468, 1013, 2117, 3917, 4641, + 4529, 2993, 5, 3925, 8235, 5401, 1760, 7427, 3417, + 1077]), + values=tensor([4.8059e-01, 2.6068e-01, 3.7083e-01, 5.4455e-01, + 5.5087e-01, 9.1571e-01, 3.0550e-01, 8.8604e-01, + 5.5442e-01, 3.9848e-01, 4.1127e-01, 2.3695e-01, + 8.8779e-01, 9.7368e-01, 3.7378e-01, 3.5178e-01, + 1.3697e-01, 7.5506e-01, 4.8064e-01, 3.7498e-03, + 7.9736e-01, 8.1659e-01, 6.1290e-01, 4.5901e-01, + 8.9740e-01, 2.9042e-01, 4.4829e-01, 7.0372e-01, + 3.3279e-01, 7.6293e-01, 6.6927e-02, 4.8552e-01, + 9.9505e-01, 7.0259e-02, 3.6985e-01, 1.1795e-01, + 6.8194e-01, 3.0421e-01, 4.1789e-01, 7.9479e-01, + 8.3575e-01, 6.1092e-01, 5.0699e-02, 2.0850e-01, + 2.8512e-01, 2.1458e-01, 1.9815e-01, 1.8111e-01, + 2.1230e-01, 8.7346e-01, 9.3908e-01, 5.0252e-02, + 5.9668e-01, 1.4455e-01, 1.3858e-01, 6.3809e-02, + 4.3928e-01, 3.1486e-02, 2.7234e-02, 7.4906e-02, + 3.5749e-01, 5.7851e-01, 5.2036e-01, 2.5703e-01, + 6.0030e-01, 1.7537e-01, 5.9778e-01, 1.5196e-01, + 3.6708e-01, 9.8464e-01, 5.9912e-01, 2.2195e-01, + 4.6650e-01, 9.6754e-01, 9.5639e-01, 1.5470e-01, + 1.5256e-01, 3.0298e-01, 2.4096e-02, 5.9170e-01, + 1.9686e-01, 1.1759e-01, 1.0457e-01, 5.9024e-01, + 7.4080e-01, 4.1405e-01, 1.2789e-01, 3.8225e-02, + 3.8505e-02, 4.8747e-01, 6.4712e-01, 5.4537e-01, + 7.2839e-01, 6.5434e-01, 4.7429e-01, 2.8609e-02, + 6.9941e-01, 5.0746e-01, 2.0928e-01, 4.3227e-01, + 5.0692e-01, 4.3792e-01, 4.9812e-01, 6.6989e-01, + 9.6967e-01, 9.7297e-01, 1.8827e-01, 7.4347e-01, + 1.5631e-01, 5.8115e-01, 8.2606e-01, 6.1017e-01, + 4.5324e-01, 6.1732e-01, 4.3508e-01, 2.7436e-01, + 8.6329e-01, 6.4097e-01, 1.0318e-01, 4.0200e-01, + 8.6444e-01, 4.3614e-01, 5.2110e-01, 7.7002e-01, + 7.8091e-01, 2.6182e-01, 9.3631e-01, 6.5569e-01, + 7.5455e-01, 1.1819e-01, 8.7432e-02, 8.5494e-01, + 7.9642e-01, 2.4008e-01, 9.6818e-01, 3.1184e-01, + 5.5783e-02, 1.7763e-01, 3.4710e-01, 3.1949e-01, + 1.1640e-01, 2.3357e-01, 7.4482e-01, 6.5708e-01, + 7.8686e-01, 5.7169e-01, 1.9176e-01, 7.4776e-01, + 8.8674e-01, 1.1710e-01, 8.7365e-02, 5.8070e-01, + 4.5757e-01, 2.3275e-01, 1.4821e-02, 5.4216e-01, + 4.2147e-01, 5.6869e-01, 6.9741e-01, 9.5911e-01, + 4.6716e-01, 7.6178e-01, 3.4794e-02, 9.5093e-01, + 5.1131e-01, 5.6037e-01, 7.2508e-01, 1.7181e-01, + 5.9834e-01, 2.8725e-01, 1.4470e-01, 8.5043e-01, + 3.4409e-01, 2.9904e-01, 2.4931e-01, 5.5706e-01, + 1.5667e-01, 2.9028e-01, 4.6574e-01, 7.5432e-01, + 1.1361e-01, 4.5574e-01, 1.8736e-01, 6.5302e-03, + 8.6205e-01, 8.7909e-01, 7.1674e-01, 7.1278e-01, + 9.9996e-01, 6.9678e-01, 9.3855e-01, 2.5683e-01, + 5.9314e-01, 2.0634e-01, 1.9915e-01, 1.7218e-01, + 3.0449e-01, 3.7677e-02, 9.2688e-01, 2.1308e-01, + 1.5730e-01, 9.2634e-01, 8.2956e-02, 6.1692e-01, + 5.1919e-01, 3.2704e-01, 4.0703e-01, 5.0143e-01, + 6.7723e-01, 8.1753e-01, 2.6635e-01, 1.3191e-02, + 8.9840e-01, 9.9355e-01, 8.7904e-01, 7.1659e-01, + 2.9610e-01, 4.5606e-01, 5.9045e-01, 8.9510e-02, + 8.4238e-01, 2.1662e-01, 7.1281e-01, 4.2497e-01, + 4.5220e-01, 6.1156e-02, 5.2940e-01, 5.8593e-01, + 2.5460e-01, 9.4797e-01, 6.6812e-01, 3.2547e-01, + 7.3813e-01, 7.9380e-01, 3.1305e-01, 5.2360e-01, + 5.7839e-01, 3.4606e-01, 7.0736e-01, 6.4167e-01, + 1.1426e-01, 7.1442e-01, 9.5551e-01, 3.4859e-01, + 6.8092e-04, 6.6756e-01, 2.2472e-01, 3.6603e-02, + 6.4753e-01, 4.6649e-01, 4.2337e-01, 5.1645e-02, + 6.0180e-02, 8.3215e-01, 2.8754e-02, 4.1647e-02, + 2.4904e-01, 4.0011e-01, 5.8432e-01, 5.0926e-01, + 8.2460e-01, 2.7598e-01, 4.5140e-01, 1.8915e-01, + 1.0612e-01, 4.0020e-01, 9.0858e-02, 4.2965e-01, + 
3.1573e-01, 1.0305e-02, 9.4588e-01, 8.5378e-01, + 4.7280e-01, 9.4753e-01, 5.0855e-01, 6.0263e-01, + 5.3343e-02, 8.1332e-01, 1.6241e-01, 8.2746e-01, + 2.4580e-01, 6.0980e-02, 3.6788e-01, 9.0945e-02, + 5.2880e-01, 5.8751e-01, 3.6803e-01, 9.8383e-01, + 8.0574e-01, 2.9759e-01, 5.9896e-01, 5.5146e-01, + 9.8346e-02, 7.5541e-01, 9.0311e-01, 7.1468e-02, + 2.2938e-01, 7.1939e-01, 6.3102e-01, 9.3064e-01, + 9.9349e-01, 9.3896e-01, 2.8836e-01, 7.8170e-01, + 4.6400e-01, 1.4346e-01, 7.8384e-02, 5.9983e-01, + 6.5895e-01, 4.6100e-01, 4.6900e-01, 2.1629e-01, + 9.6051e-01, 6.5077e-01, 5.2054e-01, 4.7614e-01, + 7.6397e-02, 5.1015e-01, 7.9634e-01, 4.6907e-01, + 9.6766e-01, 8.1380e-01, 7.4731e-01, 9.8854e-01, + 3.5440e-02, 9.6596e-01, 5.3848e-01, 2.9996e-01, + 4.0293e-02, 6.8423e-01, 6.1380e-01, 3.3767e-01, + 9.1466e-01, 3.8488e-01, 5.0871e-01, 9.9911e-01, + 5.5936e-02, 5.3818e-01, 8.4011e-01, 4.9164e-01, + 9.0611e-01, 2.9127e-01, 7.7511e-02, 6.8092e-01, + 4.4948e-01, 8.0411e-01, 8.0856e-01, 1.7749e-01, + 6.0065e-01, 1.2962e-01, 5.3925e-01, 8.6809e-01, + 3.1247e-01, 5.4289e-01, 5.0400e-01, 8.6523e-01, + 6.1521e-01, 1.8618e-01, 6.4956e-01, 6.7938e-01, + 4.8913e-03, 8.8041e-01, 2.2916e-01, 2.9907e-01, + 2.4663e-01, 7.4980e-01, 7.8897e-01, 2.2230e-02, + 4.0795e-01, 6.5666e-02, 5.0611e-03, 8.6068e-01, + 9.0253e-01, 3.9248e-02, 9.1534e-01, 7.2456e-01, + 6.7679e-01, 3.8350e-01, 3.6286e-01, 8.5937e-01, + 1.8495e-01, 3.0385e-01, 5.7674e-01, 5.5443e-01, + 6.7577e-01, 3.2599e-01, 6.4667e-01, 7.7571e-01, + 5.4077e-01, 3.4173e-02, 6.6062e-01, 5.5497e-01, + 3.2429e-01, 2.3905e-01, 9.2743e-01, 8.2636e-01, + 6.2779e-01, 2.0618e-01, 6.5440e-01, 4.8835e-02, + 6.1570e-02, 7.9004e-01, 8.5253e-01, 6.4570e-01, + 4.3657e-01, 5.6724e-01, 6.6125e-01, 2.1790e-01, + 9.0854e-01, 1.0131e-02, 8.3074e-01, 8.0931e-02, + 1.2273e-01, 9.1632e-01, 7.5735e-01, 6.1709e-01, + 4.5239e-01, 7.6859e-01, 4.5898e-01, 7.7444e-01, + 2.2262e-01, 4.2228e-01, 6.4247e-04, 6.8685e-01, + 4.5280e-01, 9.8831e-01, 6.3551e-01, 2.4349e-01, + 6.3998e-01, 2.4863e-01, 7.4978e-01, 9.6525e-02, + 8.5432e-01, 2.8565e-01, 8.6400e-03, 6.9887e-01, + 9.8123e-01, 5.5568e-01, 6.5677e-02, 2.1300e-01, + 3.8626e-01, 1.7121e-01, 6.2299e-01, 8.5313e-01, + 9.7331e-01, 4.4678e-01, 6.2567e-01, 3.4985e-01, + 3.7997e-02, 1.7816e-01, 4.6951e-01, 2.5928e-01, + 8.1800e-01, 9.8585e-01, 9.9241e-01, 9.4591e-01, + 6.4886e-01, 6.6587e-01, 3.0200e-01, 1.9709e-01, + 6.9655e-01, 7.7186e-01, 4.6370e-01, 2.0512e-01, + 2.9187e-01, 6.5662e-01, 1.9881e-01, 1.8878e-01, + 8.3759e-01, 8.1497e-01, 3.8474e-01, 9.3922e-01, + 1.1980e-01, 7.9274e-01, 2.4139e-03, 4.0606e-01, + 9.4741e-01, 3.5579e-01, 7.3689e-01, 7.4013e-01, + 1.6341e-01, 4.0976e-01, 1.2106e-02, 6.9624e-01, + 3.4058e-01, 9.2252e-01, 5.7347e-01, 1.7341e-01, + 4.7453e-01, 5.3293e-01, 7.2677e-01, 7.6843e-01, + 5.5980e-01, 9.1006e-01, 6.5609e-01, 2.0080e-01, + 7.6444e-01, 7.5927e-01, 3.7025e-01, 2.2248e-01, + 2.7530e-01, 7.3119e-01, 4.2414e-01, 2.5233e-01, + 7.8818e-01, 2.3316e-01, 4.3970e-01, 9.2583e-02, + 1.2232e-01, 5.7076e-01, 5.1912e-01, 9.8308e-01, + 7.4081e-01, 1.3379e-01, 9.3796e-01, 9.1761e-01, + 5.4932e-01, 5.4914e-01, 6.8584e-01, 6.1119e-01, + 2.9282e-01, 8.8707e-01, 9.8210e-02, 7.5203e-01, + 5.7105e-02, 8.3390e-01, 4.3245e-01, 6.0756e-01, + 9.5487e-01, 1.4073e-01, 9.3377e-01, 5.3720e-01, + 2.9851e-01, 2.3323e-01, 5.7038e-01, 1.7177e-01, + 4.2263e-01, 3.6571e-01, 2.4465e-01, 2.3924e-01, + 7.4506e-01, 3.7908e-01, 1.0900e-01, 3.7748e-01, + 5.8194e-01, 1.9196e-01, 9.6828e-01, 6.5906e-01, + 5.4583e-01, 6.8981e-01, 3.3761e-01, 3.0262e-01, + 
1.4093e-01, 8.9661e-01, 8.8664e-01, 3.6191e-01, + 7.2603e-01, 1.8928e-01, 1.5570e-01, 4.7355e-01, + 5.0235e-01, 7.0181e-01, 1.5485e-01, 6.3084e-01, + 6.8009e-01, 1.9700e-01, 5.5915e-01, 7.1118e-01, + 8.4480e-01, 8.8653e-01, 9.3805e-02, 8.1394e-01, + 7.6043e-01, 2.2537e-01, 3.5050e-01, 9.4824e-01, + 6.4804e-02, 8.9447e-02, 2.5136e-01, 8.4746e-01, + 4.4124e-01, 5.3570e-01, 7.2660e-01, 9.9153e-01, + 5.5409e-01, 5.7607e-01, 6.9001e-01, 6.8634e-01, + 6.5678e-01, 8.7622e-01, 6.9088e-01, 4.1804e-01, + 4.2696e-01, 7.1106e-01, 7.7869e-01, 9.0423e-01, + 5.0743e-01, 6.4079e-01, 3.9395e-01, 8.6100e-02, + 7.4019e-01, 4.5245e-01, 3.2098e-01, 7.4052e-01, + 5.5819e-03, 9.6355e-01, 4.3721e-01, 6.7750e-01, + 6.2113e-01, 4.9398e-01, 1.7439e-01, 1.9619e-01, + 6.3345e-01, 7.7621e-02, 5.8501e-01, 3.1614e-01, + 9.1062e-01, 2.9312e-01, 6.2033e-01, 7.4267e-01, + 8.8925e-01, 7.8148e-01, 6.9515e-01, 5.1877e-01, + 6.7792e-01, 8.1468e-01, 9.2343e-01, 9.2067e-01, + 5.8445e-01, 7.1713e-01, 7.6645e-01, 9.5688e-01, + 2.2005e-01, 5.8916e-02, 6.8496e-01, 2.7123e-01, + 5.3625e-01, 2.2256e-01, 1.1104e-01, 4.1722e-03, + 7.7575e-01, 3.4159e-01, 6.4283e-01, 9.6679e-01, + 1.7163e-01, 2.4263e-01, 6.1465e-01, 5.1415e-01, + 8.7361e-01, 6.0521e-01, 7.6585e-01, 3.6699e-01, + 4.6723e-01, 7.0614e-01, 5.9241e-01, 5.2480e-01, + 2.3362e-01, 2.8520e-01, 2.4362e-01, 1.8558e-01, + 7.9483e-01, 6.5965e-01, 4.1345e-01, 1.2104e-01, + 7.5543e-01, 9.2465e-01, 4.8631e-01, 1.8207e-01, + 7.4888e-01, 3.0846e-01, 9.4452e-01, 8.2471e-01, + 2.2609e-01, 8.6210e-01, 1.5446e-01, 5.2700e-01, + 1.4129e-01, 6.8153e-02, 7.1222e-01, 5.4379e-02, + 8.2369e-01, 6.7305e-02, 6.9326e-01, 9.5006e-01, + 7.8894e-01, 5.0686e-01, 1.0844e-02, 8.8476e-01, + 3.4081e-01, 7.1386e-01, 5.8332e-01, 8.0538e-02, + 1.2881e-01, 6.6073e-01, 6.6982e-01, 9.4311e-01, + 5.7920e-01, 3.4038e-01, 5.1929e-01, 6.6344e-01, + 3.6283e-01, 8.4071e-01, 8.3032e-02, 3.7224e-01, + 2.8825e-01, 2.1346e-01, 6.6572e-01, 9.1219e-01, + 7.5086e-01, 5.9660e-01, 6.4939e-01, 8.3636e-02, + 2.1276e-01, 8.1840e-01, 9.1241e-01, 4.1267e-01, + 1.2384e-01, 3.2306e-01, 1.8222e-01, 1.6973e-01, + 2.4016e-01, 5.0092e-02, 6.0148e-01, 2.9363e-01, + 7.7028e-01, 6.2133e-01, 4.0709e-01, 4.9819e-01, + 1.9079e-01, 8.4728e-01, 5.7285e-01, 4.6834e-01, + 2.6392e-01, 8.2958e-01, 9.0930e-01, 7.7450e-01, + 4.9029e-01, 6.0270e-02, 9.5693e-01, 2.5910e-01, + 4.3595e-01, 3.2220e-01, 7.7687e-02, 8.6974e-01, + 4.2140e-01, 9.3963e-01, 1.5046e-02, 1.9120e-01, + 6.3065e-01, 5.0110e-02, 8.6206e-01, 1.5593e-01, + 3.8009e-01, 9.0525e-01, 3.1360e-01, 2.8014e-01, + 8.6922e-01, 8.0054e-02, 1.5774e-01, 7.8264e-01, + 5.5830e-01, 8.2156e-01, 5.4363e-02, 6.1422e-01, + 6.4211e-01, 1.8056e-01, 4.8874e-01, 2.7600e-01, + 7.3967e-01, 6.3099e-01, 9.2851e-01, 2.9826e-01, + 7.2043e-02, 1.9501e-01, 2.0723e-01, 4.8988e-01, + 1.1743e-01, 6.0389e-01, 2.4019e-01, 5.3080e-01, + 2.4116e-01, 6.4832e-01, 1.3206e-01, 5.7881e-01, + 2.4751e-01, 2.5004e-01, 1.0644e-01, 6.4024e-02, + 8.1402e-01, 4.5228e-01, 6.7548e-01, 9.3771e-01, + 2.0818e-01, 4.7722e-01, 2.0221e-01, 6.3364e-01, + 2.8929e-01, 5.9029e-01, 4.2652e-01, 9.1620e-01, + 2.2454e-01, 6.7134e-01, 7.9748e-02, 3.2710e-01, + 8.1974e-01, 1.5549e-02, 4.9868e-01, 5.7492e-01, + 7.2008e-02, 7.9730e-01, 1.5602e-01, 2.0940e-01, + 5.3816e-01, 1.9379e-01, 2.7025e-02, 4.9766e-01, + 6.0189e-01, 3.2022e-01, 8.5769e-01, 6.5720e-01, + 3.0809e-01, 1.1742e-01, 9.2720e-01, 1.9132e-01, + 3.9789e-01, 3.7697e-02, 6.0352e-01, 3.9733e-01, + 4.4928e-01, 1.5419e-01, 9.8145e-01, 4.1892e-01, + 7.0064e-01, 3.8871e-01, 6.2793e-01, 7.7207e-01, + 
4.4089e-01, 3.6800e-01, 4.9602e-01, 7.8104e-01, + 1.0657e-02, 8.0591e-01, 9.9727e-01, 1.3379e-01, + 5.4798e-01, 7.6658e-01, 6.3649e-01, 2.8939e-01, + 8.1990e-01, 7.9348e-01, 8.2543e-01, 6.7492e-01, + 6.7563e-02, 9.7314e-01, 2.2021e-01, 2.0069e-01, + 8.7506e-01, 7.2409e-01, 3.6865e-01, 1.9488e-01, + 7.4566e-01, 9.7425e-01, 9.4802e-01, 3.3832e-01, + 8.4433e-01, 4.1426e-02, 2.6255e-01, 1.7428e-01, + 1.8207e-01, 1.8488e-01, 8.2856e-03, 1.1348e-01, + 9.2197e-01, 9.2179e-01, 6.1482e-01, 1.7686e-01, + 6.5075e-02, 1.5011e-01, 7.1724e-01, 1.9623e-01, + 7.7691e-01, 8.8594e-01, 5.6303e-01, 7.3530e-01, + 9.8857e-01, 7.4483e-01, 8.5336e-01, 7.1468e-01, + 3.4586e-01, 6.2753e-01, 3.4994e-01, 9.4314e-01, + 5.9523e-01, 2.4831e-01, 6.7030e-01, 7.2279e-01, + 7.4167e-01, 3.7774e-01, 9.7774e-01, 2.7848e-01, + 5.9253e-01, 2.2646e-01, 7.3907e-02, 1.8840e-01, + 1.6017e-01, 9.0665e-01, 7.6825e-01, 1.7432e-01, + 5.2899e-02, 4.0457e-01, 9.7040e-01, 9.0688e-02, + 5.8056e-01, 2.6439e-01, 8.2605e-01, 9.1309e-01, + 6.8101e-01, 8.0757e-01, 6.2304e-01, 9.9518e-01, + 4.7896e-01, 3.1904e-01, 7.0082e-01, 4.5077e-01, + 1.7951e-01, 3.2747e-02, 1.3088e-01, 9.1100e-01, + 2.8097e-01, 7.7241e-01, 2.7702e-01, 8.9268e-01, + 7.4964e-02, 4.5346e-01, 3.5535e-01, 7.4730e-01, + 1.0795e-01, 9.2068e-01, 2.5310e-01, 9.3425e-01, + 8.6243e-03, 3.4566e-01, 1.2041e-01, 8.5141e-02, + 6.8159e-01, 6.3087e-01, 4.5251e-02, 3.5864e-01, + 2.6782e-01, 7.8826e-02, 8.7299e-01, 6.6518e-01, + 7.0044e-01, 1.8992e-01, 8.0560e-01, 7.7462e-01, + 3.8679e-01, 6.1189e-01, 1.5892e-01, 6.3252e-01, + 6.9613e-01, 3.5041e-01, 8.1632e-01, 4.0743e-01, + 7.7173e-01, 4.3097e-01, 4.1902e-01, 7.2770e-01, + 8.4816e-01, 2.2977e-01, 7.1589e-01, 9.0275e-01, + 9.9999e-01, 4.1852e-01, 2.2491e-02, 7.9153e-01, + 9.7639e-01, 3.3819e-01, 7.4342e-01, 8.5250e-01, + 9.9385e-01, 6.3891e-02, 9.7464e-01, 3.0992e-01, + 9.9006e-01, 1.1081e-01, 7.5155e-01, 8.4844e-01, + 7.0384e-01, 4.6698e-01, 1.6751e-01, 6.1651e-01, + 4.3979e-01, 6.1587e-01, 3.4869e-01, 4.9449e-01, + 9.1105e-02, 9.8166e-01, 3.3675e-02, 3.1123e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3844, 0.6738, 0.8785, ..., 0.1267, 0.6920, 0.7757]) +tensor([0.1121, 0.3867, 0.9452, ..., 0.7986, 0.0788, 0.8344]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1133,375 +1133,375 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.192691802978516 seconds +Time: 10.441989183425903 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32)
[… tensor dump elided: this hunk repeats the one above verbatim — the old-seed crow_indices/col_indices/values tensor is removed and the identical new-seed tensor, size=(10000, 10000), nnz=1000, layout=torch.sparse_csr, is added for the second (timed) printout …]
-tensor([0.3844, 0.6738, 0.8785, ..., 0.1267, 0.6920, 0.7757])
+tensor([0.1121, 0.3867, 0.9452, ..., 0.7986, 0.0788, 0.8344])
Matrix Type: synthetic
Matrix Format: csr
Shape: torch.Size([10000, 10000])
@@ -1509,13 +1509,13 @@ Rows: 10000
Size: 100000000
NNZ: 1000
Density: 1e-05
-Time: 10.192691802978516 seconds
+Time: 10.441989183425903 seconds
-[18.55, 17.99, 18.16, 18.03, 18.22, 17.84, 17.97, 17.8, 18.22, 18.03]
-[73.24]
-13.685040473937988
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 279705, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.192691802978516, 'TIME_S_1KI': 0.03644086377783206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1002.2923643112182, 'W': 73.24}
-[18.55, 17.99, 18.16, 18.03, 18.22, 17.84, 17.97, 17.8, 18.22, 18.03, 18.98, 18.15, 18.06, 17.9, 17.97, 18.02, 18.2, 18.05, 17.87, 17.78]
-325.11999999999995
-16.255999999999997
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 279705, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.192691802978516, 'TIME_S_1KI': 0.03644086377783206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1002.2923643112182, 'W': 73.24, 'J_1KI': 3.5833909451429835, 'W_1KI': 0.2618473034089487, 'W_D': 56.983999999999995, 'J_D': 779.8283463668822, 'W_D_1KI': 
0.20372892869272982, 'J_D_1KI': 0.0007283707073263969} +[18.13, 22.04, 18.79, 17.71, 17.77, 17.9, 17.83, 17.54, 17.97, 17.85] +[79.9] +13.809443473815918 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 286739, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.441989183425903, 'TIME_S_1KI': 0.036416354885194915, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1103.374533557892, 'W': 79.9} +[18.13, 22.04, 18.79, 17.71, 17.77, 17.9, 17.83, 17.54, 17.97, 17.85, 18.13, 17.63, 18.09, 17.81, 17.93, 17.7, 17.59, 17.82, 17.87, 17.69] +325.89 +16.2945 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 286739, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.441989183425903, 'TIME_S_1KI': 0.036416354885194915, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1103.374533557892, 'W': 79.9, 'J_1KI': 3.8480099796605693, 'W_1KI': 0.27865061955297327, 'W_D': 63.605500000000006, 'J_D': 878.3565568737985, 'W_D_1KI': 0.22182367937392544, 'J_D_1KI': 0.0007736083315277149} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json index 8eef901..d6eca5a 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8355, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.90480637550354, "TIME_S_1KI": 1.305183288510298, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1307.563270778656, "W": 87.44, "J_1KI": 156.5006906976249, "W_1KI": 10.4655894673848, "W_D": 70.932, "J_D": 1060.7053742322921, "W_D_1KI": 8.489766606822261, "J_D_1KI": 1.0161300546765126} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 7939, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.723366260528564, "TIME_S_1KI": 1.350720022739459, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1313.216650083065, "W": 88.63, "J_1KI": 165.41335811601778, "W_1KI": 11.163874543393375, "W_D": 72.31174999999999, "J_D": 1071.4317284964918, "W_D_1KI": 9.108420455976821, "J_D_1KI": 1.1473007250254217} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output index 03135d2..2715834 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, 
"MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2567212581634521} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.3224358558654785} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 9, ..., 2499992, - 2499996, 2500000]), - col_indices=tensor([164554, 277712, 289036, ..., 389470, 409865, - 491502]), - values=tensor([0.0126, 0.9348, 0.8595, ..., 0.3584, 0.7345, 0.5238]), +tensor(crow_indices=tensor([ 0, 5, 7, ..., 2499992, + 2499995, 2500000]), + col_indices=tensor([ 81446, 111347, 262323, ..., 95785, 329641, + 405148]), + values=tensor([0.7472, 0.0566, 0.1215, ..., 0.1323, 0.4741, 0.2377]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0175, 0.7668, 0.4852, ..., 0.2657, 0.5513, 0.9738]) +tensor([0.0977, 0.7761, 0.5514, ..., 0.9913, 0.3768, 0.8332]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 1.2567212581634521 seconds +Time: 1.3224358558654785 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8355', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.90480637550354} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7939', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.723366260528564} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 12, ..., 2499995, - 2499997, 2500000]), - col_indices=tensor([ 72448, 73110, 121261, ..., 13350, 176428, - 278854]), - values=tensor([0.1918, 0.3445, 0.8471, ..., 0.5873, 0.4603, 0.6922]), +tensor(crow_indices=tensor([ 0, 1, 9, ..., 2499990, + 2499995, 2500000]), + col_indices=tensor([185711, 60363, 105088, ..., 318731, 319175, + 323232]), + values=tensor([0.5920, 0.0659, 0.0171, ..., 0.3410, 0.9352, 0.3450]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8369, 0.9252, 0.2721, ..., 0.2352, 0.7861, 0.2173]) +tensor([0.5701, 0.8906, 0.4066, ..., 0.2438, 0.9359, 0.5479]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.90480637550354 seconds +Time: 10.723366260528564 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 12, ..., 2499995, - 2499997, 2500000]), - col_indices=tensor([ 72448, 73110, 121261, ..., 13350, 176428, - 278854]), - values=tensor([0.1918, 0.3445, 0.8471, ..., 0.5873, 0.4603, 0.6922]), +tensor(crow_indices=tensor([ 0, 1, 9, ..., 2499990, + 2499995, 2500000]), + col_indices=tensor([185711, 60363, 105088, ..., 318731, 319175, + 323232]), + values=tensor([0.5920, 0.0659, 0.0171, ..., 0.3410, 0.9352, 0.3450]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8369, 0.9252, 0.2721, ..., 0.2352, 0.7861, 0.2173]) +tensor([0.5701, 0.8906, 0.4066, ..., 0.2438, 0.9359, 0.5479]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.90480637550354 seconds +Time: 10.723366260528564 seconds -[18.48, 21.5, 18.34, 18.43, 18.27, 18.03, 18.28, 18.13, 18.03, 18.78] -[87.44] -14.953834295272827 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8355, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.90480637550354, 'TIME_S_1KI': 1.305183288510298, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1307.563270778656, 'W': 87.44} -[18.48, 21.5, 18.34, 18.43, 18.27, 18.03, 18.28, 18.13, 18.03, 18.78, 18.48, 18.09, 18.1, 17.9, 18.11, 18.02, 17.99, 17.84, 18.15, 18.16] -330.15999999999997 -16.508 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8355, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.90480637550354, 'TIME_S_1KI': 1.305183288510298, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1307.563270778656, 'W': 87.44, 'J_1KI': 156.5006906976249, 'W_1KI': 10.4655894673848, 'W_D': 70.932, 'J_D': 1060.7053742322921, 'W_D_1KI': 8.489766606822261, 'J_D_1KI': 1.0161300546765126} +[18.35, 18.0, 17.81, 18.15, 20.9, 17.73, 17.87, 17.99, 18.01, 17.97] +[88.63] +14.81684136390686 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 7939, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 
'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.723366260528564, 'TIME_S_1KI': 1.350720022739459, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1313.216650083065, 'W': 88.63} +[18.35, 18.0, 17.81, 18.15, 20.9, 17.73, 17.87, 17.99, 18.01, 17.97, 18.83, 17.84, 18.13, 18.11, 17.94, 17.77, 17.81, 17.81, 18.09, 17.66] +326.365 +16.31825 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 7939, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.723366260528564, 'TIME_S_1KI': 1.350720022739459, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1313.216650083065, 'W': 88.63, 'J_1KI': 165.41335811601778, 'W_1KI': 11.163874543393375, 'W_D': 72.31174999999999, 'J_D': 1071.4317284964918, 'W_D_1KI': 9.108420455976821, 'J_D_1KI': 1.1473007250254217} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json index 035895d..c8d8a61 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 77922, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.570462703704834, "TIME_S_1KI": 0.13565440701861906, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1209.7638923931122, "W": 83.24, "J_1KI": 15.525318811030418, "W_1KI": 1.0682477349144015, "W_D": 66.53899999999999, "J_D": 967.0408413736818, "W_D_1KI": 0.8539180205846871, "J_D_1KI": 0.010958625556129042} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 78314, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.500975370407104, "TIME_S_1KI": 0.1340880988125636, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1246.381588702202, "W": 89.06, "J_1KI": 15.91518232630439, "W_1KI": 1.1372168450085551, "W_D": 72.72325000000001, "J_D": 1017.7511775273682, "W_D_1KI": 0.9286111040171617, "J_D_1KI": 0.0118575363794106} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output index 70de4f5..e0ea23e 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.14919304847717285} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, 
"MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.14957594871520996} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 9, ..., 249989, 249995, +tensor(crow_indices=tensor([ 0, 8, 15, ..., 249987, 249991, 250000]), - col_indices=tensor([ 8787, 10800, 12548, ..., 22776, 32520, 35593]), - values=tensor([0.0395, 0.0216, 0.0459, ..., 0.9233, 0.0886, 0.1442]), + col_indices=tensor([ 3249, 11393, 14942, ..., 33826, 38027, 48849]), + values=tensor([0.4435, 0.3887, 0.6766, ..., 0.7020, 0.9117, 0.7998]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0084, 0.2765, 0.2672, ..., 0.0856, 0.1416, 0.8826]) +tensor([0.4072, 0.0290, 0.9610, ..., 0.4695, 0.4913, 0.1254]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.14919304847717285 seconds +Time: 0.14957594871520996 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '70378', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.483437538146973} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '70198', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.41176462173462} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 9, ..., 249988, 249997, +tensor(crow_indices=tensor([ 0, 7, 12, ..., 249984, 249993, 250000]), - col_indices=tensor([ 1665, 9567, 9654, ..., 4112, 18670, 38091]), - values=tensor([0.4890, 0.0494, 0.7903, ..., 0.9513, 0.0590, 0.1377]), + col_indices=tensor([ 257, 837, 13772, ..., 26625, 34572, 42693]), + values=tensor([0.6771, 0.0630, 0.4952, ..., 0.2009, 0.3453, 0.0186]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5003, 0.9747, 0.2176, ..., 0.9666, 0.4758, 0.9002]) +tensor([0.1005, 0.4396, 0.3760, ..., 0.8175, 0.2613, 0.1136]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 9.483437538146973 seconds +Time: 9.41176462173462 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '77922', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.570462703704834} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '78314', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.500975370407104} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 249995, 249999, +tensor(crow_indices=tensor([ 0, 4, 13, ..., 249994, 249998, 250000]), - col_indices=tensor([18420, 40988, 3727, ..., 33621, 36384, 44487]), - values=tensor([0.1861, 0.8144, 0.1628, ..., 0.4774, 0.5715, 0.3216]), + col_indices=tensor([ 417, 3050, 28352, ..., 48782, 1625, 48386]), + values=tensor([0.9216, 0.4652, 0.6011, ..., 0.6170, 0.6564, 0.4691]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6272, 0.7644, 0.0884, ..., 0.9496, 0.3089, 0.8679]) +tensor([0.8482, 0.9835, 0.6846, ..., 0.7970, 0.3559, 0.9710]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.570462703704834 seconds +Time: 10.500975370407104 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 249995, 249999, +tensor(crow_indices=tensor([ 0, 4, 13, ..., 249994, 249998, 250000]), - col_indices=tensor([18420, 40988, 3727, ..., 33621, 36384, 44487]), - values=tensor([0.1861, 0.8144, 0.1628, ..., 0.4774, 0.5715, 0.3216]), + col_indices=tensor([ 417, 3050, 28352, ..., 48782, 1625, 48386]), + values=tensor([0.9216, 0.4652, 0.6011, ..., 0.6170, 0.6564, 0.4691]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6272, 0.7644, 0.0884, ..., 0.9496, 0.3089, 0.8679]) +tensor([0.8482, 0.9835, 0.6846, ..., 0.7970, 0.3559, 0.9710]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.570462703704834 seconds +Time: 10.500975370407104 seconds -[18.59, 17.97, 19.57, 18.3, 18.09, 17.87, 18.02, 21.06, 18.56, 17.89] -[83.24] -14.533444166183472 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 77922, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.570462703704834, 'TIME_S_1KI': 0.13565440701861906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1209.7638923931122, 'W': 83.24} -[18.59, 17.97, 19.57, 18.3, 18.09, 17.87, 18.02, 21.06, 18.56, 17.89, 18.2, 17.85, 17.83, 21.57, 17.96, 18.06, 18.27, 18.3, 18.38, 18.04] -334.02 -16.701 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 77922, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.570462703704834, 'TIME_S_1KI': 0.13565440701861906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1209.7638923931122, 'W': 83.24, 'J_1KI': 15.525318811030418, 'W_1KI': 1.0682477349144015, 'W_D': 66.53899999999999, 'J_D': 967.0408413736818, 'W_D_1KI': 0.8539180205846871, 'J_D_1KI': 0.010958625556129042} +[18.34, 17.82, 17.88, 21.83, 18.66, 17.91, 17.8, 17.7, 17.85, 17.95] +[89.06] +13.994852781295776 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78314, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.500975370407104, 'TIME_S_1KI': 0.1340880988125636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1246.381588702202, 'W': 89.06} +[18.34, 17.82, 17.88, 21.83, 18.66, 17.91, 17.8, 17.7, 17.85, 17.95, 18.61, 18.02, 17.66, 18.02, 17.94, 17.65, 17.89, 18.0, 17.71, 17.89] +326.735 +16.336750000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78314, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.500975370407104, 'TIME_S_1KI': 0.1340880988125636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1246.381588702202, 'W': 89.06, 'J_1KI': 15.91518232630439, 'W_1KI': 1.1372168450085551, 'W_D': 72.72325000000001, 'J_D': 1017.7511775273682, 'W_D_1KI': 0.9286111040171617, 'J_D_1KI': 0.0118575363794106} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json index f58be3b..2dcd593 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 17357, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.690638303756714, "TIME_S_1KI": 0.6159266177194627, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1295.2395059108735, "W": 87.56, "J_1KI": 74.62346637730447, "W_1KI": 5.044650573255747, "W_D": 71.326, "J_D": 1055.0965394997595, "W_D_1KI": 4.109350694244396, "J_D_1KI": 0.23675466349279234} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 16503, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.453594446182251, "TIME_S_1KI": 0.6334360083731595, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1283.8258463859559, "W": 88.2, "J_1KI": 77.7934827840972, "W_1KI": 5.344482821305218, "W_D": 72.15925, "J_D": 1050.3391179798841, "W_D_1KI": 4.372492880082409, "J_D_1KI": 0.2649513955088414} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output index ec20a71..eb8fa27 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.6049323081970215} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.6362464427947998} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 46, 103, ..., 2499893, - 2499950, 2500000]), - col_indices=tensor([ 214, 217, 3424, ..., 47339, 47927, 48505]), - values=tensor([0.8463, 0.5755, 0.1058, ..., 0.4565, 0.0843, 0.4040]), +tensor(crow_indices=tensor([ 0, 51, 104, ..., 2499889, + 2499946, 2500000]), + col_indices=tensor([ 554, 2346, 3623, ..., 48601, 49342, 49458]), + values=tensor([0.6346, 0.6039, 0.4681, ..., 0.0926, 0.5934, 0.5905]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.2070, 0.0126, 0.4112, ..., 0.3463, 0.8132, 0.3234]) +tensor([0.3504, 0.0589, 0.7648, ..., 0.3104, 0.5013, 0.0863]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.6049323081970215 seconds +Time: 0.6362464427947998 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '17357', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.690638303756714} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '16503', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.453594446182251} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 47, 101, ..., 2499901, - 2499949, 2500000]), - col_indices=tensor([ 511, 725, 819, ..., 47217, 48788, 49222]), - values=tensor([0.0511, 0.3894, 0.2647, ..., 0.8233, 0.9615, 0.4045]), +tensor(crow_indices=tensor([ 0, 44, 95, ..., 2499888, + 2499946, 2500000]), + col_indices=tensor([ 29, 59, 1099, ..., 49158, 49549, 49729]), + values=tensor([0.6925, 0.1264, 0.7717, ..., 0.9011, 0.2629, 0.2267]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7118, 0.9063, 0.7110, ..., 0.7333, 0.4959, 0.7807]) +tensor([0.0922, 0.7073, 0.7429, ..., 0.1285, 0.2485, 0.0697]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.690638303756714 seconds +Time: 10.453594446182251 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 47, 101, ..., 2499901, - 2499949, 2500000]), - col_indices=tensor([ 511, 725, 819, ..., 47217, 48788, 49222]), - values=tensor([0.0511, 0.3894, 0.2647, ..., 0.8233, 0.9615, 0.4045]), +tensor(crow_indices=tensor([ 0, 44, 95, ..., 2499888, + 2499946, 2500000]), + col_indices=tensor([ 29, 59, 1099, ..., 49158, 49549, 49729]), + values=tensor([0.6925, 0.1264, 0.7717, ..., 0.9011, 0.2629, 0.2267]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7118, 0.9063, 0.7110, ..., 0.7333, 0.4959, 0.7807]) +tensor([0.0922, 0.7073, 0.7429, ..., 0.1285, 0.2485, 0.0697]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.690638303756714 seconds +Time: 10.453594446182251 seconds -[18.27, 17.74, 18.29, 17.77, 18.01, 17.85, 17.88, 17.79, 18.2, 17.88] -[87.56] -14.792593717575073 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.690638303756714, 'TIME_S_1KI': 0.6159266177194627, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.2395059108735, 'W': 87.56} -[18.27, 17.74, 18.29, 17.77, 18.01, 17.85, 17.88, 17.79, 18.2, 17.88, 18.2, 18.34, 18.01, 17.89, 18.13, 18.46, 18.22, 17.9, 17.95, 18.15] -324.68 -16.234 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.690638303756714, 'TIME_S_1KI': 0.6159266177194627, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.2395059108735, 'W': 87.56, 'J_1KI': 74.62346637730447, 'W_1KI': 5.044650573255747, 'W_D': 71.326, 'J_D': 1055.0965394997595, 'W_D_1KI': 4.109350694244396, 'J_D_1KI': 0.23675466349279234} +[18.19, 17.67, 17.57, 17.63, 17.67, 17.54, 18.18, 18.16, 17.63, 17.67] +[88.2] +14.555848598480225 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 16503, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.453594446182251, 'TIME_S_1KI': 0.6334360083731595, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1283.8258463859559, 'W': 88.2} +[18.19, 17.67, 17.57, 17.63, 17.67, 17.54, 18.18, 18.16, 17.63, 17.67, 18.06, 18.43, 18.51, 17.47, 17.4, 17.92, 17.64, 17.73, 17.72, 17.97] +320.81500000000005 +16.040750000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 16503, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.453594446182251, 'TIME_S_1KI': 0.6334360083731595, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1283.8258463859559, 'W': 88.2, 'J_1KI': 77.7934827840972, 'W_1KI': 5.344482821305218, 'W_D': 72.15925, 'J_D': 1050.3391179798841, 'W_D_1KI': 4.372492880082409, 'J_D_1KI': 0.2649513955088414} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json index 3c6e8be..774716c 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 112508, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.720516443252563, "TIME_S_1KI": 0.09528670355221462, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1090.2722121667862, "W": 75.82, "J_1KI": 9.69061944187779, "W_1KI": 0.6739076332349699, "W_D": 59.38549999999999, "J_D": 853.9483046113252, "W_D_1KI": 0.5278335762790201, "J_D_1KI": 0.004691520392141182} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 111170, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.608571290969849, "TIME_S_1KI": 0.09542656553899297, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1151.6899712467193, "W": 82.52, "J_1KI": 10.35971909010272, "W_1KI": 0.7422865881083026, "W_D": 66.40424999999999, "J_D": 926.7705861992239, "W_D_1KI": 0.5973216695151569, "J_D_1KI": 0.005373047310561814} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output index b66333e..05a9200 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.11333847045898438} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.11198759078979492} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 24996, 24999, 25000]), - col_indices=tensor([ 9502, 18497, 7204, ..., 33396, 45910, 109]), - values=tensor([0.5325, 0.6011, 0.4727, ..., 0.6967, 0.0269, 0.7415]), +tensor(crow_indices=tensor([ 0, 1, 3, ..., 24999, 25000, 25000]), + col_indices=tensor([42990, 31865, 45603, ..., 32, 31145, 42502]), + values=tensor([0.2680, 0.8494, 0.1049, ..., 0.3912, 0.0276, 0.1741]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7210, 0.8240, 0.5786, ..., 0.5702, 0.4441, 0.2533]) +tensor([0.9789, 0.0522, 0.6759, ..., 0.0240, 0.3185, 0.8367]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.11333847045898438 seconds +Time: 0.11198759078979492 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '92642', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.645956993103027} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '93760', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.855576515197754} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([35285, 1305, 12700, ..., 6399, 17561, 45264]), - values=tensor([0.6896, 0.7157, 0.5414, ..., 0.3157, 0.2585, 0.8046]), + col_indices=tensor([ 213, 39463, 2534, ..., 21769, 20293, 48702]), + values=tensor([0.6944, 0.6922, 0.1012, ..., 0.1071, 0.8204, 0.4025]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8892, 0.5178, 0.0901, ..., 0.0600, 0.1718, 0.0275]) +tensor([0.8784, 0.5968, 0.0083, ..., 0.0039, 0.6938, 0.6481]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 8.645956993103027 seconds +Time: 8.855576515197754 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '112508', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.720516443252563} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '111170', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.608571290969849} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([27684, 39939, 2715, ..., 47308, 11944, 42221]), - values=tensor([0.6561, 0.5911, 0.5622, ..., 0.2806, 0.4491, 0.6100]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([37263, 14810, 49193, ..., 22299, 19031, 40338]), + values=tensor([0.7995, 0.8033, 0.3510, ..., 0.6585, 0.0621, 0.7519]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8957, 0.2813, 0.1993, ..., 0.7019, 0.9944, 0.8970]) +tensor([0.9318, 0.0252, 0.9296, ..., 0.2820, 0.1820, 0.1630]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.720516443252563 seconds +Time: 10.608571290969849 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([27684, 39939, 2715, ..., 47308, 11944, 42221]), - values=tensor([0.6561, 0.5911, 0.5622, ..., 0.2806, 0.4491, 0.6100]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([37263, 14810, 49193, ..., 22299, 19031, 40338]), + values=tensor([0.7995, 0.8033, 0.3510, ..., 0.6585, 0.0621, 0.7519]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8957, 0.2813, 0.1993, ..., 0.7019, 0.9944, 0.8970]) +tensor([0.9318, 0.0252, 0.9296, ..., 0.2820, 0.1820, 0.1630]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.720516443252563 seconds +Time: 10.608571290969849 seconds -[18.48, 18.01, 18.17, 19.64, 18.03, 18.42, 18.45, 18.1, 18.26, 18.04] -[75.82] -14.379744291305542 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 112508, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.720516443252563, 'TIME_S_1KI': 0.09528670355221462, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1090.2722121667862, 'W': 75.82} -[18.48, 18.01, 18.17, 19.64, 18.03, 18.42, 18.45, 18.1, 18.26, 18.04, 18.62, 18.47, 18.04, 18.06, 17.8, 18.08, 18.08, 18.67, 17.82, 18.04] -328.69000000000005 -16.434500000000003 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 112508, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.720516443252563, 'TIME_S_1KI': 0.09528670355221462, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1090.2722121667862, 'W': 75.82, 'J_1KI': 9.69061944187779, 'W_1KI': 0.6739076332349699, 'W_D': 59.38549999999999, 'J_D': 853.9483046113252, 'W_D_1KI': 0.5278335762790201, 'J_D_1KI': 0.004691520392141182} +[18.42, 17.72, 18.1, 17.98, 17.9, 18.19, 17.99, 17.99, 17.89, 17.81] +[82.52] +13.9564950466156 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 111170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.608571290969849, 'TIME_S_1KI': 0.09542656553899297, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1151.6899712467193, 'W': 82.52} +[18.42, 17.72, 18.1, 17.98, 17.9, 18.19, 17.99, 17.99, 17.89, 17.81, 18.18, 17.91, 17.74, 18.05, 17.74, 17.64, 17.73, 17.76, 17.96, 17.64] +322.315 +16.11575 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 111170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.608571290969849, 'TIME_S_1KI': 0.09542656553899297, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1151.6899712467193, 'W': 82.52, 'J_1KI': 10.35971909010272, 'W_1KI': 0.7422865881083026, 'W_D': 66.40424999999999, 'J_D': 926.7705861992239, 'W_D_1KI': 0.5973216695151569, 'J_D_1KI': 0.005373047310561814} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..0ba39e0 --- /dev/null +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 323751, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.660384178161621, "TIME_S_1KI": 0.03292772587007182, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1091.8019851422312, "W": 79.43, "J_1KI": 3.3723509275407064, "W_1KI": 0.24534287152781, "W_D": 62.86500000000001, "J_D": 864.1084199416639, "W_D_1KI": 0.19417700640306904, "J_D_1KI": 0.0005997726845726161} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..9a09fc6 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04866957664489746} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([2014, 2333, 4073, ..., 1117, 3505, 2207]), + values=tensor([0.1339, 0.9980, 0.7024, ..., 0.3782, 0.0544, 0.2308]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2751, 0.2895, 0.5101, ..., 0.3933, 0.2935, 0.0678]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.04866957664489746 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '215740', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.996948957443237} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), + col_indices=tensor([3059, 3492, 4969, ..., 3863, 1265, 1575]), + values=tensor([0.7839, 0.7068, 0.1359, ..., 0.6765, 0.7179, 0.7182]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2637, 0.1133, 0.2354, ..., 0.5397, 0.9545, 0.7707]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 6.996948957443237 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '323751', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.660384178161621} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([4531, 3967, 1182, ..., 4005, 3234, 3449]), + values=tensor([0.1835, 0.1001, 0.2805, ..., 0.8615, 0.2040, 0.1828]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2085, 0.7612, 0.9816, ..., 0.7337, 0.6921, 0.5494]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.660384178161621 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([4531, 3967, 1182, ..., 4005, 3234, 3449]), + values=tensor([0.1835, 0.1001, 0.2805, ..., 0.8615, 0.2040, 0.1828]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2085, 0.7612, 0.9816, ..., 0.7337, 0.6921, 0.5494]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.660384178161621 seconds + +[18.46, 17.59, 17.71, 17.79, 18.06, 17.81, 22.16, 18.13, 17.88, 17.67] +[79.43] +13.745461225509644 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 323751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.660384178161621, 'TIME_S_1KI': 0.03292772587007182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1091.8019851422312, 'W': 79.43} +[18.46, 17.59, 17.71, 17.79, 18.06, 17.81, 22.16, 18.13, 17.88, 17.67, 18.24, 17.78, 22.87, 17.85, 17.88, 17.75, 18.09, 17.86, 17.93, 17.95] +331.3 +16.565 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 323751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.660384178161621, 'TIME_S_1KI': 0.03292772587007182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1091.8019851422312, 'W': 79.43, 'J_1KI': 3.3723509275407064, 'W_1KI': 0.24534287152781, 'W_D': 62.86500000000001, 'J_D': 864.1084199416639, 'W_D_1KI': 0.19417700640306904, 'J_D_1KI': 0.0005997726845726161} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..9b486c5 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 244536, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.47565221786499, "TIME_S_1KI": 0.04283889577757463, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1122.3107054543495, "W": 80.61, "J_1KI": 4.589552071900863, "W_1KI": 0.32964471488860536, "W_D": 64.2855, "J_D": 895.0292129448652, "W_D_1KI": 0.2628876729806654, "J_D_1KI": 0.0010750469173482246} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..a34514c --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.05913829803466797} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 24990, 24996, 25000]), + col_indices=tensor([ 91, 1225, 4183, ..., 1260, 1498, 1816]), + values=tensor([0.4538, 0.5289, 0.0869, ..., 0.3885, 0.0043, 0.2412]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3917, 0.0968, 0.9015, ..., 0.9180, 0.2586, 0.0822]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.05913829803466797 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '177549', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.6236653327941895} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 24989, 24994, 25000]), + col_indices=tensor([ 412, 1102, 1155, ..., 695, 1250, 1499]), + values=tensor([0.5017, 0.7691, 0.1146, ..., 0.5300, 0.6967, 0.6559]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7930, 0.9227, 0.2342, ..., 0.4335, 0.3949, 0.6803]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 7.6236653327941895 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '244536', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.47565221786499} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 8, ..., 24986, 24994, 25000]), + col_indices=tensor([ 194, 369, 2258, ..., 1755, 2835, 2987]), + values=tensor([0.8194, 0.2005, 0.5023, ..., 0.6221, 0.3751, 0.8448]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5474, 0.2694, 0.2646, ..., 0.5254, 0.5763, 0.9998]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.47565221786499 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 8, ..., 24986, 24994, 25000]), + col_indices=tensor([ 194, 369, 2258, ..., 1755, 2835, 2987]), + values=tensor([0.8194, 0.2005, 0.5023, ..., 0.6221, 0.3751, 0.8448]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5474, 0.2694, 0.2646, ..., 0.5254, 0.5763, 0.9998]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.47565221786499 seconds + +[18.33, 18.0, 17.95, 18.79, 18.11, 18.49, 17.87, 17.96, 17.71, 17.91] +[80.61] +13.922723054885864 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 244536, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.47565221786499, 'TIME_S_1KI': 0.04283889577757463, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1122.3107054543495, 'W': 80.61} +[18.33, 18.0, 17.95, 18.79, 18.11, 18.49, 17.87, 17.96, 17.71, 17.91, 18.2, 17.81, 17.83, 18.46, 18.26, 17.74, 18.1, 18.0, 17.87, 20.64] +326.49 +16.3245 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 244536, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.47565221786499, 'TIME_S_1KI': 0.04283889577757463, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1122.3107054543495, 'W': 80.61, 'J_1KI': 4.589552071900863, 'W_1KI': 0.32964471488860536, 'W_D': 64.2855, 'J_D': 895.0292129448652, 'W_D_1KI': 0.2628876729806654, 'J_D_1KI': 0.0010750469173482246} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..6d5d9d5 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 162920, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.414216756820679, "TIME_S_1KI": 0.06392227324343652, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1267.1163623809814, "W": 89.2, "J_1KI": 7.7775372107843195, "W_1KI": 0.5475079793763811, "W_D": 72.96625, "J_D": 1036.5104178988934, "W_D_1KI": 0.44786551681807024, "J_D_1KI": 0.0027489904052177155} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..548b49c --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, 
"MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.08090949058532715} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 56, 99, ..., 249898, 249957, + 250000]), + col_indices=tensor([ 27, 423, 607, ..., 4371, 4379, 4963]), + values=tensor([0.2630, 0.0898, 0.5767, ..., 0.9425, 0.5823, 0.3558]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9446, 0.5109, 0.8342, ..., 0.1182, 0.7217, 0.5335]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.08090949058532715 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '129774', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.36373782157898} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 58, 109, ..., 249903, 249955, + 250000]), + col_indices=tensor([ 168, 371, 372, ..., 4708, 4876, 4879]), + values=tensor([0.3469, 0.2972, 0.5901, ..., 0.0640, 0.2331, 0.9267]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6978, 0.0250, 0.3323, ..., 0.6356, 0.0847, 0.1678]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 8.36373782157898 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '162920', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.414216756820679} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 50, 103, ..., 249909, 249962, + 250000]), + col_indices=tensor([ 86, 107, 119, ..., 4571, 4629, 4973]), + values=tensor([0.3206, 0.5923, 0.4852, ..., 0.3807, 0.1641, 0.9581]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7369, 0.6267, 0.7979, ..., 0.0231, 0.0899, 0.6643]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.414216756820679 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 50, 103, ..., 249909, 249962, + 250000]), + col_indices=tensor([ 86, 107, 119, ..., 4571, 4629, 4973]), + values=tensor([0.3206, 0.5923, 0.4852, ..., 0.3807, 0.1641, 0.9581]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7369, 0.6267, 0.7979, ..., 0.0231, 0.0899, 0.6643]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.414216756820679 seconds + +[18.27, 17.91, 18.14, 17.62, 17.8, 17.9, 19.35, 17.84, 17.83, 17.75] +[89.2] +14.205340385437012 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 162920, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.414216756820679, 'TIME_S_1KI': 0.06392227324343652, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1267.1163623809814, 'W': 89.2} +[18.27, 17.91, 18.14, 17.62, 17.8, 17.9, 19.35, 17.84, 17.83, 17.75, 18.16, 17.99, 17.94, 17.95, 17.67, 17.72, 18.79, 18.01, 18.17, 17.91] +324.675 +16.23375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 162920, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.414216756820679, 'TIME_S_1KI': 0.06392227324343652, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1267.1163623809814, 'W': 89.2, 'J_1KI': 7.7775372107843195, 'W_1KI': 0.5475079793763811, 'W_D': 72.96625, 'J_D': 1036.5104178988934, 'W_D_1KI': 0.44786551681807024, 'J_D_1KI': 0.0027489904052177155} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..c4982b3 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 43553, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.298628091812134, "TIME_S_1KI": 0.23646196798870647, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1262.7661000442506, "W": 89.26, "J_1KI": 28.993779993209436, "W_1KI": 2.0494569834454572, "W_D": 72.47675000000001, "J_D": 1025.3325447163584, "W_D_1KI": 1.664104654099603, "J_D_1KI": 0.03820872624387764} diff --git 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..c9d208e --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.24108004570007324} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 254, 488, ..., 1249514, + 1249753, 1250000]), + col_indices=tensor([ 20, 116, 133, ..., 4920, 4936, 4946]), + values=tensor([0.1564, 0.7439, 0.0267, ..., 0.8153, 0.5940, 0.0091]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2737, 0.8794, 0.7768, ..., 0.6794, 0.5883, 0.1555]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.24108004570007324 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '43553', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.298628091812134} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 228, 477, ..., 1249472, + 1249753, 1250000]), + col_indices=tensor([ 33, 68, 106, ..., 4915, 4934, 4973]), + values=tensor([0.4796, 0.5786, 0.7704, ..., 0.3679, 0.0791, 0.9103]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4175, 0.6924, 0.0772, ..., 0.0345, 0.5597, 0.1347]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.298628091812134 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 228, 477, ..., 1249472, + 1249753, 1250000]), + col_indices=tensor([ 33, 68, 106, ..., 4915, 4934, 4973]), + values=tensor([0.4796, 0.5786, 0.7704, ..., 0.3679, 0.0791, 0.9103]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4175, 0.6924, 0.0772, ..., 0.0345, 0.5597, 0.1347]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.298628091812134 seconds + +[18.22, 18.23, 17.9, 17.56, 17.94, 17.95, 17.83, 17.93, 21.91, 17.95] +[89.26] +14.147054672241211 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 43553, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.298628091812134, 'TIME_S_1KI': 0.23646196798870647, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.7661000442506, 'W': 89.26} +[18.22, 18.23, 17.9, 17.56, 17.94, 17.95, 17.83, 17.93, 21.91, 17.95, 18.27, 17.9, 17.95, 22.05, 18.3, 22.03, 18.06, 18.09, 17.91, 17.81] +335.66499999999996 +16.78325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 43553, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.298628091812134, 'TIME_S_1KI': 0.23646196798870647, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.7661000442506, 'W': 89.26, 'J_1KI': 28.993779993209436, 'W_1KI': 2.0494569834454572, 'W_D': 72.47675000000001, 'J_D': 1025.3325447163584, 'W_D_1KI': 1.664104654099603, 'J_D_1KI': 0.03820872624387764} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..901a9df --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 19209, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.589914798736572, "TIME_S_1KI": 0.5512996407276054, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1296.501946372986, "W": 88.72, "J_1KI": 67.49450499104513, "W_1KI": 4.618668332552449, "W_D": 72.5775, "J_D": 1060.6049370253086, "W_D_1KI": 3.778307043573325, "J_D_1KI": 0.19669462458083842} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..ac30e09 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.5466129779815674} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 514, 1026, ..., 2498984, + 2499497, 2500000]), + col_indices=tensor([ 4, 26, 34, ..., 4975, 4994, 4997]), + values=tensor([0.5421, 0.0550, 0.0297, ..., 0.2626, 0.0439, 0.1648]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7995, 0.3197, 0.3485, ..., 0.5295, 0.0131, 0.9723]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.5466129779815674 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '19209', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.589914798736572} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 503, 1003, ..., 2498928, + 2499468, 2500000]), + col_indices=tensor([ 6, 8, 21, ..., 4933, 4958, 4973]), + values=tensor([0.5143, 0.8442, 0.2205, ..., 0.0567, 0.9724, 0.7726]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8237, 0.2559, 0.0746, ..., 0.2976, 0.1284, 0.3075]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.589914798736572 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 503, 1003, ..., 2498928, + 2499468, 2500000]), + col_indices=tensor([ 6, 8, 21, ..., 4933, 4958, 4973]), + values=tensor([0.5143, 0.8442, 0.2205, ..., 0.0567, 0.9724, 0.7726]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8237, 0.2559, 0.0746, ..., 0.2976, 0.1284, 0.3075]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.589914798736572 seconds + +[18.45, 18.0, 17.83, 17.78, 17.91, 17.57, 18.07, 17.82, 17.87, 17.88] +[88.72] +14.613412380218506 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19209, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.589914798736572, 'TIME_S_1KI': 0.5512996407276054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.501946372986, 'W': 88.72} +[18.45, 18.0, 17.83, 17.78, 17.91, 17.57, 18.07, 17.82, 17.87, 17.88, 18.16, 17.97, 18.01, 17.95, 17.55, 18.45, 18.51, 17.65, 17.73, 17.87] +322.85 +16.142500000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19209, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.589914798736572, 'TIME_S_1KI': 0.5512996407276054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.501946372986, 'W': 88.72, 'J_1KI': 67.49450499104513, 'W_1KI': 4.618668332552449, 'W_D': 72.5775, 'J_D': 1060.6049370253086, 'W_D_1KI': 3.778307043573325, 'J_D_1KI': 0.19669462458083842} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..5c599da --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 353197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.226954936981201, "TIME_S_1KI": 0.0289553844935863, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1121.6130926513672, "W": 79.36, "J_1KI": 3.1756019803434548, "W_1KI": 0.22469047019085664, "W_D": 63.1995, "J_D": 893.2130374120474, "W_D_1KI": 0.1789355515477198, "J_D_1KI": 0.0005066168499384757} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..bd4d211 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04602789878845215} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 578, 489, 2035, 2602, 4011, 1806, 4187, 4466, 1701, + 3439, 1855, 2207, 4634, 4913, 4351, 2699, 4950, 1482, + 1630, 4011, 847, 262, 526, 120, 2856, 4636, 1597, + 3213, 516, 2941, 2174, 769, 2517, 499, 1934, 2295, + 4580, 2315, 3877, 4302, 1055, 1440, 1706, 4670, 429, + 2051, 2186, 1313, 1143, 3111, 4634, 2695, 4779, 4363, + 1595, 4655, 1338, 4420, 4067, 1526, 282, 2445, 369, + 1772, 4270, 1668, 3875, 412, 4249, 1643, 914, 856, + 3381, 2292, 4030, 3967, 3653, 1168, 3730, 661, 3997, + 1557, 2874, 4414, 241, 2576, 1853, 4254, 862, 608, + 4068, 2069, 967, 859, 1460, 3895, 2418, 1568, 1236, + 2469, 4377, 1881, 774, 3150, 4962, 4438, 3334, 3370, + 3850, 469, 1930, 1600, 4349, 1435, 2755, 2777, 2752, + 1750, 2319, 2825, 2053, 4982, 3998, 1031, 158, 2744, + 1843, 2266, 4999, 4122, 250, 2042, 4015, 3394, 4312, + 559, 3260, 296, 3827, 4372, 2993, 1918, 3134, 621, + 557, 2314, 3437, 2519, 4868, 4567, 645, 1366, 3758, + 4230, 810, 851, 1555, 4001, 1607, 876, 3143, 3677, + 620, 2976, 4865, 2725, 1890, 514, 1960, 1749, 2271, + 3746, 3959, 4437, 4381, 2386, 2843, 3407, 4429, 2460, + 1759, 3731, 4851, 3169, 994, 1771, 4332, 2376, 2120, + 69, 919, 3516, 57, 1846, 3363, 4747, 3055, 4318, + 1028, 4163, 4665, 4823, 505, 247, 4342, 3354, 2982, + 3367, 3474, 1671, 2141, 1806, 254, 2129, 187, 1832, + 3940, 4918, 419, 4670, 303, 921, 39, 4798, 1396, + 2176, 2156, 2536, 266, 4518, 4967, 4630, 2593, 1182, + 2488, 2445, 979, 1019, 4241, 1675, 1170, 2324, 2271, + 3633, 2309, 4715, 1380, 4338, 2573, 2764]), + values=tensor([0.6984, 0.1478, 0.0323, 0.8260, 0.6827, 0.1000, 0.4915, + 0.6587, 0.0376, 0.3470, 0.7142, 0.7494, 0.0897, 0.2827, + 0.6630, 0.3710, 0.5106, 0.3028, 0.3002, 0.0863, 0.1240, + 0.1798, 0.6305, 0.3002, 0.5649, 0.4551, 0.6642, 0.1708, + 0.5500, 0.6807, 0.3124, 0.4343, 0.1155, 0.5562, 0.7660, + 0.5677, 0.3794, 0.3402, 0.7695, 0.1890, 0.5328, 0.3628, + 0.6604, 0.2382, 0.4320, 0.8974, 0.3878, 0.2382, 0.2066, + 0.8734, 0.7091, 0.8197, 0.8175, 0.2812, 0.4902, 0.1894, + 0.3966, 0.5276, 0.7667, 0.0175, 0.7037, 0.7601, 0.1810, + 0.4741, 0.3863, 0.8670, 0.4845, 0.6586, 0.0648, 0.8124, + 0.7536, 0.0293, 0.5547, 0.4571, 0.0817, 0.7764, 0.3555, + 0.5853, 0.3952, 0.4216, 0.4013, 0.1391, 0.8172, 0.9389, + 0.3613, 0.8906, 0.6121, 0.5615, 0.7545, 0.1340, 0.0792, + 0.8924, 0.1038, 0.5565, 0.0169, 0.8812, 0.4265, 0.0727, + 0.1083, 0.5669, 0.5957, 0.1631, 0.9558, 0.7748, 0.9411, + 0.7256, 0.5800, 0.4846, 0.9970, 0.8586, 0.7723, 0.4078, + 0.6823, 0.7466, 0.9258, 0.1331, 0.3558, 0.7864, 0.4232, + 0.6710, 0.9708, 0.0475, 0.1393, 0.7271, 0.7770, 0.3222, + 0.4988, 0.2948, 0.5044, 0.9371, 0.0161, 0.2536, 0.5990, + 0.3689, 0.2194, 0.9840, 0.0757, 0.2181, 0.9674, 0.1702, + 0.3378, 0.9217, 0.7196, 0.9431, 0.0238, 0.2739, 0.4274, + 0.2266, 0.8166, 0.3636, 0.1711, 0.9816, 0.7731, 0.9314, + 0.1464, 0.5983, 0.5403, 0.2869, 0.9912, 0.8860, 0.2927, + 0.0879, 0.5830, 0.5619, 0.8287, 0.6664, 0.8686, 0.3651, + 0.4784, 0.5559, 0.8167, 0.6136, 0.5106, 0.0184, 0.8321, + 0.7988, 0.2100, 0.3066, 0.2554, 0.2412, 0.6610, 0.3077, + 0.2061, 0.0284, 0.0567, 0.7554, 0.1226, 0.1847, 0.1023, + 0.5889, 0.1845, 0.3455, 0.6453, 0.2221, 0.4719, 0.2134, + 0.3242, 0.6794, 0.0360, 0.6922, 0.2624, 0.4100, 0.5084, + 
0.0818, 0.0375, 0.1527, 0.6806, 0.3748, 0.6249, 0.4817, + 0.9505, 0.0887, 0.9942, 0.1910, 0.6323, 0.8143, 0.9940, + 0.2187, 0.9553, 0.7841, 0.3921, 0.6046, 0.0750, 0.3392, + 0.4333, 0.0760, 0.7016, 0.3358, 0.0964, 0.7961, 0.8524, + 0.6531, 0.3470, 0.9589, 0.2215, 0.3106, 0.8796, 0.7441, + 0.0627, 0.6404, 0.0703, 0.8970, 0.3227, 0.0864, 0.1787, + 0.7479, 0.4857, 0.1928, 0.9739, 0.1023]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.6347, 0.6451, 0.4713, ..., 0.2060, 0.2664, 0.4890]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.04602789878845215 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '228122', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.781703472137451} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), + col_indices=tensor([4412, 3431, 2377, 4102, 3105, 469, 2716, 4410, 2733, + 2349, 109, 3862, 716, 2870, 2405, 4409, 3017, 152, + 1291, 2012, 2518, 1601, 808, 1197, 1818, 4054, 1727, + 338, 2366, 2141, 4163, 602, 909, 1404, 3638, 4853, + 4334, 3774, 2454, 2125, 993, 2793, 20, 3340, 493, + 3838, 1420, 1159, 1629, 2170, 2030, 2643, 3042, 750, + 3505, 1065, 53, 1925, 4323, 314, 2351, 3881, 3378, + 516, 4610, 4522, 2030, 1297, 4803, 2768, 1424, 2842, + 4885, 268, 4021, 4648, 2523, 1919, 3169, 805, 738, + 2589, 45, 1444, 1957, 223, 2481, 4394, 386, 2449, + 63, 2942, 4865, 2949, 3856, 1911, 4197, 1693, 1675, + 4639, 4564, 233, 3973, 3759, 1045, 484, 4027, 3720, + 1180, 3869, 701, 796, 3406, 3536, 4421, 2555, 3123, + 2911, 213, 4454, 3508, 1549, 2383, 1068, 4187, 1933, + 1065, 1293, 2519, 2363, 3252, 3060, 1708, 1125, 1222, + 792, 2489, 2625, 4980, 3534, 4557, 2587, 1504, 2523, + 4865, 3799, 697, 2081, 3495, 3792, 447, 3562, 1341, + 4862, 3634, 3761, 4281, 363, 243, 4562, 286, 2825, + 3913, 2972, 2700, 1419, 1430, 3352, 3317, 563, 848, + 2244, 1261, 353, 3757, 649, 2753, 1341, 974, 197, + 2980, 1854, 432, 2396, 3616, 49, 1220, 2936, 3180, + 1438, 2052, 3219, 4512, 4166, 642, 4875, 934, 3770, + 3666, 2272, 4170, 4061, 4308, 2711, 1697, 3362, 1307, + 1394, 3062, 4568, 1642, 2190, 3138, 2, 977, 97, + 4543, 198, 2355, 2473, 2444, 381, 2793, 3795, 82, + 621, 1709, 2950, 2181, 896, 3658, 1597, 3087, 77, + 4639, 116, 1322, 3984, 4640, 1253, 1197, 4103, 4814, + 4947, 1925, 1050, 735, 66, 1794, 677]), + values=tensor([0.8584, 0.2940, 0.8361, 0.6545, 0.0599, 0.3888, 0.5851, + 0.6940, 0.8362, 0.8362, 0.9462, 0.2506, 0.0683, 0.7589, + 0.7588, 0.1215, 0.5075, 0.0715, 0.7309, 0.7006, 0.3393, + 0.6062, 0.5675, 0.0991, 0.6421, 0.8285, 0.2411, 0.6192, + 0.7606, 0.0570, 0.3224, 0.8569, 0.9310, 0.1626, 0.5654, + 0.9357, 0.1546, 0.1781, 0.6544, 0.6109, 0.7147, 0.0506, + 0.5901, 0.5614, 0.8122, 0.3694, 0.6076, 0.1018, 0.7603, + 0.4975, 0.8669, 0.5965, 0.4565, 0.6649, 0.6463, 0.7871, + 
0.1496, 0.1997, 0.4029, 0.6148, 0.0954, 0.9115, 0.5070, + 0.1492, 0.5094, 0.8294, 0.3206, 0.4740, 0.8681, 0.4774, + 0.4284, 0.5390, 0.3012, 0.1084, 0.4943, 0.6244, 0.2177, + 0.7785, 0.0851, 0.4084, 0.4411, 0.4278, 0.1858, 0.2899, + 0.9883, 0.8319, 0.3029, 0.9928, 0.0011, 0.8219, 0.6450, + 0.9238, 0.2393, 0.7397, 0.9537, 0.1430, 0.9063, 0.8994, + 0.7356, 0.5662, 0.3795, 0.1296, 0.3682, 0.9644, 0.9991, + 0.3763, 0.9169, 0.8616, 0.9415, 0.2403, 0.4748, 0.5073, + 0.7745, 0.4686, 0.2383, 0.8867, 0.7226, 0.4254, 0.8763, + 0.5133, 0.8457, 0.4420, 0.3749, 0.5921, 0.2344, 0.4320, + 0.7194, 0.0469, 0.9783, 0.0970, 0.8022, 0.9309, 0.8787, + 0.3357, 0.7904, 0.8963, 0.4849, 0.1787, 0.5132, 0.4628, + 0.5414, 0.9554, 0.3271, 0.3169, 0.2442, 0.2757, 0.5089, + 0.3495, 0.4214, 0.3725, 0.8627, 0.8227, 0.6433, 0.8876, + 0.3830, 0.5849, 0.0981, 0.0978, 0.2785, 0.4140, 0.2048, + 0.1636, 0.0621, 0.1099, 0.4695, 0.1663, 0.9375, 0.7340, + 0.9932, 0.1563, 0.6681, 0.4036, 0.6962, 0.7990, 0.9004, + 0.2559, 0.4308, 0.5817, 0.7744, 0.5854, 0.2835, 0.0025, + 0.6549, 0.6423, 0.7235, 0.2989, 0.5604, 0.4228, 0.9786, + 0.9508, 0.7948, 0.6501, 0.6846, 0.8831, 0.1362, 0.6745, + 0.3634, 0.1194, 0.7865, 0.3274, 0.6153, 0.1243, 0.8629, + 0.7042, 0.7027, 0.1577, 0.8610, 0.0174, 0.4922, 0.3920, + 0.9174, 0.0231, 0.0128, 0.8149, 0.0929, 0.1162, 0.7130, + 0.4659, 0.5103, 0.1249, 0.5040, 0.7310, 0.9342, 0.2365, + 0.3416, 0.1041, 0.7717, 0.6249, 0.9648, 0.2441, 0.8921, + 0.8343, 0.6811, 0.2402, 0.4086, 0.3764, 0.9013, 0.2993, + 0.8767, 0.3813, 0.1437, 0.1242, 0.1512, 0.2907, 0.4614, + 0.4486, 0.2404, 0.7355, 0.7961, 0.7130]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8182, 0.2605, 0.1489, ..., 0.1484, 0.3699, 0.6778]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 6.781703472137451 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '353197', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.226954936981201} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1961, 1566, 179, 628, 4168, 3230, 47, 1058, 848, + 1307, 863, 3163, 497, 3237, 3835, 4225, 1765, 2385, + 2578, 3624, 2513, 1168, 2630, 3631, 1864, 568, 4361, + 4779, 4022, 399, 4958, 2227, 3685, 929, 3248, 4399, + 3742, 2634, 1997, 92, 422, 3204, 3122, 339, 265, + 3708, 478, 2565, 4710, 4857, 937, 3612, 4449, 1275, + 3883, 720, 2924, 2672, 816, 3571, 2100, 2481, 4778, + 4274, 2449, 1483, 3559, 3509, 2069, 4491, 301, 3501, + 3355, 3144, 2461, 4209, 4595, 3120, 42, 339, 2378, + 677, 812, 4696, 2299, 2787, 3449, 4225, 795, 357, + 3876, 3155, 630, 1217, 467, 4920, 2116, 1865, 509, + 2607, 505, 639, 4966, 3789, 4209, 698, 4136, 4750, + 2065, 2749, 4384, 509, 3499, 4937, 4796, 3051, 552, + 774, 3789, 1722, 767, 2957, 1237, 2321, 4698, 2045, + 4243, 3205, 4990, 779, 4074, 4440, 1390, 3840, 4194, + 3980, 3010, 577, 724, 889, 4234, 2698, 2212, 2964, + 4694, 1090, 4209, 4557, 847, 1631, 4530, 2407, 4787, + 789, 927, 3820, 3586, 723, 3734, 3635, 4071, 1476, + 3647, 2541, 4116, 2412, 1162, 883, 651, 4351, 3454, + 4637, 602, 3838, 3759, 4938, 3880, 1311, 3214, 3977, + 4877, 2037, 1676, 3561, 2013, 1782, 2279, 1713, 2273, + 4556, 10, 2998, 564, 2394, 4714, 4432, 152, 1276, + 2893, 1660, 4751, 3614, 3802, 3684, 4922, 4957, 354, + 4042, 3162, 2717, 2866, 4789, 3665, 2555, 3305, 1695, + 647, 3279, 2845, 2963, 2699, 4805, 4132, 3345, 427, + 3911, 132, 4865, 27, 3182, 674, 856, 3414, 836, + 2173, 3550, 3891, 1058, 4695, 4487, 1810, 3555, 3979, + 4408, 2688, 366, 1825, 2362, 2165, 528]), + values=tensor([0.4900, 0.1519, 0.0910, 0.3336, 0.1203, 0.0899, 0.6181, + 0.4862, 0.1318, 0.9250, 0.1441, 0.0670, 0.4525, 0.3839, + 0.8394, 0.7346, 0.5373, 0.5064, 0.9776, 0.6275, 0.4349, + 0.6891, 0.1229, 0.7614, 0.8176, 0.5621, 0.6156, 0.1536, + 0.6722, 0.6064, 0.2625, 0.9808, 0.5748, 0.9150, 0.4568, + 0.6909, 0.1190, 0.8592, 0.4831, 0.2786, 0.9355, 0.9047, + 0.2710, 0.9935, 0.6258, 0.0847, 0.2480, 0.4761, 0.4988, + 0.5869, 0.3880, 0.6275, 0.2775, 0.2227, 0.6139, 0.7839, + 0.7203, 0.4507, 0.9394, 0.2396, 0.5645, 0.0507, 0.3048, + 0.2385, 0.6518, 0.7404, 0.0325, 0.8256, 0.0527, 0.3542, + 0.1592, 0.5500, 0.2905, 0.8845, 0.4741, 0.2973, 0.0174, + 0.5234, 0.2314, 0.9813, 0.0451, 0.4561, 0.7036, 0.8049, + 0.7589, 0.9746, 0.1814, 0.0845, 0.1329, 0.7672, 0.6622, + 0.7941, 0.1831, 0.9526, 0.7283, 0.6676, 0.5133, 0.1222, + 0.9044, 0.9700, 0.2020, 0.9254, 0.3948, 0.8395, 0.6783, + 0.0135, 0.0908, 0.7106, 0.9979, 0.7791, 0.6211, 0.9269, + 0.0715, 0.4671, 0.4465, 0.5092, 0.0890, 0.6377, 0.1978, + 0.5935, 0.9471, 0.6538, 0.5919, 0.8443, 0.4530, 0.0807, + 0.9258, 0.4523, 0.4554, 0.2932, 0.8921, 0.0589, 0.3042, + 0.4416, 0.9399, 0.0639, 0.1672, 0.2592, 0.9334, 0.7784, + 0.2523, 0.4009, 0.3271, 0.4901, 0.0985, 0.6126, 0.3137, + 0.5938, 0.4894, 0.3721, 0.8337, 0.3234, 0.9788, 0.2330, + 0.2625, 0.8031, 0.0536, 0.2237, 0.3051, 0.9123, 0.3222, + 0.8402, 0.3156, 0.2969, 0.2334, 0.9665, 0.7377, 0.6395, + 0.4451, 0.7617, 0.6622, 0.5325, 0.4459, 0.0092, 0.7370, + 0.4452, 0.8857, 0.5499, 0.2713, 0.3315, 0.9736, 0.3753, + 0.9983, 0.8451, 0.4842, 0.0958, 0.3583, 0.1831, 0.1567, + 0.8604, 0.6328, 0.2541, 0.3850, 0.8555, 0.4146, 0.1263, + 0.1834, 0.2208, 0.6295, 0.4250, 0.5900, 0.7980, 0.5475, + 0.9764, 0.2051, 0.6760, 0.3076, 0.0382, 0.6317, 0.2634, + 0.3634, 0.2930, 0.9653, 0.5672, 0.1508, 0.6672, 0.4422, + 0.7693, 0.8897, 0.4264, 0.4859, 0.4197, 0.0661, 0.6678, + 0.0402, 0.8927, 0.4292, 0.2572, 0.1798, 0.3259, 
0.6416, + 0.0733, 0.9193, 0.7059, 0.2676, 0.4781, 0.7963, 0.9337, + 0.7706, 0.7962, 0.5827, 0.3612, 0.1219, 0.5026, 0.1788, + 0.6829, 0.9316, 0.0223, 0.3259, 0.0955]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8734, 0.8080, 0.1055, ..., 0.8475, 0.7666, 0.2333]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.226954936981201 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1961, 1566, 179, 628, 4168, 3230, 47, 1058, 848, + 1307, 863, 3163, 497, 3237, 3835, 4225, 1765, 2385, + 2578, 3624, 2513, 1168, 2630, 3631, 1864, 568, 4361, + 4779, 4022, 399, 4958, 2227, 3685, 929, 3248, 4399, + 3742, 2634, 1997, 92, 422, 3204, 3122, 339, 265, + 3708, 478, 2565, 4710, 4857, 937, 3612, 4449, 1275, + 3883, 720, 2924, 2672, 816, 3571, 2100, 2481, 4778, + 4274, 2449, 1483, 3559, 3509, 2069, 4491, 301, 3501, + 3355, 3144, 2461, 4209, 4595, 3120, 42, 339, 2378, + 677, 812, 4696, 2299, 2787, 3449, 4225, 795, 357, + 3876, 3155, 630, 1217, 467, 4920, 2116, 1865, 509, + 2607, 505, 639, 4966, 3789, 4209, 698, 4136, 4750, + 2065, 2749, 4384, 509, 3499, 4937, 4796, 3051, 552, + 774, 3789, 1722, 767, 2957, 1237, 2321, 4698, 2045, + 4243, 3205, 4990, 779, 4074, 4440, 1390, 3840, 4194, + 3980, 3010, 577, 724, 889, 4234, 2698, 2212, 2964, + 4694, 1090, 4209, 4557, 847, 1631, 4530, 2407, 4787, + 789, 927, 3820, 3586, 723, 3734, 3635, 4071, 1476, + 3647, 2541, 4116, 2412, 1162, 883, 651, 4351, 3454, + 4637, 602, 3838, 3759, 4938, 3880, 1311, 3214, 3977, + 4877, 2037, 1676, 3561, 2013, 1782, 2279, 1713, 2273, + 4556, 10, 2998, 564, 2394, 4714, 4432, 152, 1276, + 2893, 1660, 4751, 3614, 3802, 3684, 4922, 4957, 354, + 4042, 3162, 2717, 2866, 4789, 3665, 2555, 3305, 1695, + 647, 3279, 2845, 2963, 2699, 4805, 4132, 3345, 427, + 3911, 132, 4865, 27, 3182, 674, 856, 3414, 836, + 2173, 3550, 3891, 1058, 4695, 4487, 1810, 3555, 3979, + 4408, 2688, 366, 1825, 2362, 2165, 528]), + values=tensor([0.4900, 0.1519, 0.0910, 0.3336, 0.1203, 0.0899, 0.6181, + 0.4862, 0.1318, 0.9250, 0.1441, 0.0670, 0.4525, 0.3839, + 0.8394, 0.7346, 0.5373, 0.5064, 0.9776, 0.6275, 0.4349, + 0.6891, 0.1229, 0.7614, 0.8176, 0.5621, 0.6156, 0.1536, + 0.6722, 0.6064, 0.2625, 0.9808, 0.5748, 0.9150, 0.4568, + 0.6909, 0.1190, 0.8592, 0.4831, 0.2786, 0.9355, 0.9047, + 0.2710, 0.9935, 0.6258, 0.0847, 0.2480, 0.4761, 0.4988, + 0.5869, 0.3880, 0.6275, 0.2775, 0.2227, 0.6139, 0.7839, + 0.7203, 0.4507, 0.9394, 0.2396, 0.5645, 0.0507, 0.3048, + 0.2385, 0.6518, 0.7404, 0.0325, 0.8256, 0.0527, 0.3542, + 0.1592, 0.5500, 0.2905, 0.8845, 0.4741, 0.2973, 0.0174, + 0.5234, 0.2314, 0.9813, 0.0451, 0.4561, 0.7036, 0.8049, + 0.7589, 0.9746, 0.1814, 0.0845, 0.1329, 0.7672, 0.6622, + 0.7941, 0.1831, 0.9526, 0.7283, 0.6676, 0.5133, 0.1222, + 0.9044, 0.9700, 0.2020, 0.9254, 0.3948, 0.8395, 0.6783, + 0.0135, 0.0908, 0.7106, 0.9979, 0.7791, 0.6211, 0.9269, + 0.0715, 0.4671, 0.4465, 0.5092, 0.0890, 0.6377, 0.1978, + 0.5935, 0.9471, 0.6538, 0.5919, 0.8443, 0.4530, 0.0807, + 0.9258, 0.4523, 0.4554, 0.2932, 0.8921, 0.0589, 0.3042, + 0.4416, 0.9399, 0.0639, 0.1672, 
0.2592, 0.9334, 0.7784, + 0.2523, 0.4009, 0.3271, 0.4901, 0.0985, 0.6126, 0.3137, + 0.5938, 0.4894, 0.3721, 0.8337, 0.3234, 0.9788, 0.2330, + 0.2625, 0.8031, 0.0536, 0.2237, 0.3051, 0.9123, 0.3222, + 0.8402, 0.3156, 0.2969, 0.2334, 0.9665, 0.7377, 0.6395, + 0.4451, 0.7617, 0.6622, 0.5325, 0.4459, 0.0092, 0.7370, + 0.4452, 0.8857, 0.5499, 0.2713, 0.3315, 0.9736, 0.3753, + 0.9983, 0.8451, 0.4842, 0.0958, 0.3583, 0.1831, 0.1567, + 0.8604, 0.6328, 0.2541, 0.3850, 0.8555, 0.4146, 0.1263, + 0.1834, 0.2208, 0.6295, 0.4250, 0.5900, 0.7980, 0.5475, + 0.9764, 0.2051, 0.6760, 0.3076, 0.0382, 0.6317, 0.2634, + 0.3634, 0.2930, 0.9653, 0.5672, 0.1508, 0.6672, 0.4422, + 0.7693, 0.8897, 0.4264, 0.4859, 0.4197, 0.0661, 0.6678, + 0.0402, 0.8927, 0.4292, 0.2572, 0.1798, 0.3259, 0.6416, + 0.0733, 0.9193, 0.7059, 0.2676, 0.4781, 0.7963, 0.9337, + 0.7706, 0.7962, 0.5827, 0.3612, 0.1219, 0.5026, 0.1788, + 0.6829, 0.9316, 0.0223, 0.3259, 0.0955]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8734, 0.8080, 0.1055, ..., 0.8475, 0.7666, 0.2333]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.226954936981201 seconds + +[18.35, 17.76, 18.03, 17.72, 17.87, 18.0, 18.15, 17.7, 17.8, 17.85] +[79.36] +14.133229494094849 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 353197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.226954936981201, 'TIME_S_1KI': 0.0289553844935863, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1121.6130926513672, 'W': 79.36} +[18.35, 17.76, 18.03, 17.72, 17.87, 18.0, 18.15, 17.7, 17.8, 17.85, 18.33, 17.94, 18.05, 17.86, 17.95, 18.18, 18.05, 17.92, 18.13, 17.67] +323.21 +16.1605 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 353197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.226954936981201, 'TIME_S_1KI': 0.0289553844935863, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1121.6130926513672, 'W': 79.36, 'J_1KI': 3.1756019803434548, 'W_1KI': 0.22469047019085664, 'W_D': 63.1995, 'J_D': 893.2130374120474, 'W_D_1KI': 0.1789355515477198, 'J_D_1KI': 0.0005066168499384757} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.0001.json deleted file mode 100644 index 1a2b755..0000000 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.0001.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 234425, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 21.312235116958618, "TIME_S_1KI": 0.09091280843322434, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2054.109435153008, "W": 83.45000000000002, "J_1KI": 8.762330959381499, "W_1KI": 0.3559773914898156, "W_D": 67.21450000000002, "J_D": 1654.4749985511307, "W_D_1KI": 0.2867206995840888, "J_D_1KI": 0.0012230807276702091} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.0001.output deleted file mode 100644 index 
06a3a12..0000000 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.0001.output +++ /dev/null @@ -1,81 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '30000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.10643196105957031} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 9, ..., 89992, 89998, 90000]), - col_indices=tensor([ 7924, 12206, 12582, ..., 21107, 10373, 19571]), - values=tensor([0.8274, 0.6462, 0.9289, ..., 0.2542, 0.4328, 0.6143]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.4141, 0.4229, 0.5665, ..., 0.1440, 0.7095, 0.1472]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 0.10643196105957031 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '197309', '-ss', '30000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 17.675063133239746} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 9, ..., 89992, 89999, 90000]), - col_indices=tensor([ 929, 2315, 11088, ..., 21381, 23338, 19838]), - values=tensor([0.3872, 0.2873, 0.0227, ..., 0.4746, 0.4839, 0.3522]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.1013, 0.5431, 0.3309, ..., 0.2751, 0.1147, 0.0007]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 17.675063133239746 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '234425', '-ss', '30000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 90000, "MATRIX_DENSITY": 0.0001, "TIME_S": 21.312235116958618} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 89994, 89997, 90000]), - col_indices=tensor([ 6200, 14122, 21980, ..., 11781, 19689, 21155]), - values=tensor([0.5859, 0.7824, 0.3581, ..., 0.7747, 0.1479, 0.5181]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.3266, 0.0767, 0.6789, ..., 0.9087, 0.9799, 0.8849]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 21.312235116958618 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 89994, 89997, 90000]), - col_indices=tensor([ 6200, 14122, 21980, ..., 11781, 19689, 21155]), - values=tensor([0.5859, 0.7824, 0.3581, ..., 0.7747, 0.1479, 0.5181]), - size=(30000, 30000), nnz=90000, layout=torch.sparse_csr) -tensor([0.3266, 0.0767, 0.6789, ..., 0.9087, 0.9799, 0.8849]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 90000 -Density: 0.0001 -Time: 21.312235116958618 seconds - -[18.37, 18.49, 18.06, 17.95, 17.95, 17.71, 18.07, 17.8, 17.94, 17.81] -[83.45] -24.61485242843628 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 234425, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 90000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 21.312235116958618, 'TIME_S_1KI': 0.09091280843322434, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2054.109435153008, 'W': 83.45000000000002} -[18.37, 18.49, 18.06, 17.95, 17.95, 17.71, 18.07, 17.8, 17.94, 17.81, 18.28, 18.06, 18.16, 17.86, 18.06, 17.97, 18.34, 17.89, 18.12, 18.1] -324.71 -16.2355 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 234425, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 90000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 21.312235116958618, 'TIME_S_1KI': 0.09091280843322434, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2054.109435153008, 'W': 83.45000000000002, 'J_1KI': 8.762330959381499, 'W_1KI': 0.3559773914898156, 'W_D': 67.21450000000002, 'J_D': 1654.4749985511307, 'W_D_1KI': 0.2867206995840888, 'J_D_1KI': 0.0012230807276702091} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.001.json deleted file mode 100644 index e69de29..0000000 diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.001.output deleted file mode 100644 index 355226c..0000000 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_0.001.output +++ /dev/null @@ -1,77 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '30000', '-sd', 
'0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 900000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2140212059020996} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 34, 62, ..., 899934, 899967, - 900000]), - col_indices=tensor([ 1559, 1711, 3295, ..., 29804, 29893, 29964]), - values=tensor([0.7225, 0.7366, 0.0675, ..., 0.3495, 0.2204, 0.5611]), - size=(30000, 30000), nnz=900000, layout=torch.sparse_csr) -tensor([0.1783, 0.4759, 0.5239, ..., 0.8363, 0.1566, 0.5506]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 900000 -Density: 0.001 -Time: 0.2140212059020996 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '98121', '-ss', '30000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 900000, "MATRIX_DENSITY": 0.001, "TIME_S": 19.3143093585968} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 29, 64, ..., 899940, 899966, - 900000]), - col_indices=tensor([ 612, 701, 1017, ..., 29770, 29777, 29834]), - values=tensor([0.4034, 0.5977, 0.8788, ..., 0.6466, 0.3405, 0.9207]), - size=(30000, 30000), nnz=900000, layout=torch.sparse_csr) -tensor([0.7678, 0.0123, 0.5496, ..., 0.4589, 0.2646, 0.8857]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 900000 -Density: 0.001 -Time: 19.3143093585968 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '106684', '-ss', '30000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 900000, "MATRIX_DENSITY": 0.001, "TIME_S": 20.90600872039795} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 21, 51, ..., 899936, 899963, - 900000]), - col_indices=tensor([ 855, 2329, 2453, ..., 28070, 28293, 29379]), - values=tensor([0.2478, 0.6443, 0.3087, ..., 0.2033, 0.4619, 0.6203]), - size=(30000, 30000), nnz=900000, layout=torch.sparse_csr) -tensor([0.3161, 0.0015, 0.4480, ..., 0.6517, 0.7843, 0.6370]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 900000 -Density: 0.001 -Time: 20.90600872039795 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 21, 51, ..., 899936, 899963, - 900000]), - col_indices=tensor([ 855, 2329, 2453, ..., 28070, 28293, 29379]), - values=tensor([0.2478, 0.6443, 0.3087, ..., 0.2033, 0.4619, 0.6203]), - size=(30000, 30000), nnz=900000, layout=torch.sparse_csr) -tensor([0.3161, 0.0015, 0.4480, ..., 0.6517, 0.7843, 0.6370]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 900000 -Density: 0.001 -Time: 20.90600872039795 seconds - diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_1e-05.json deleted file mode 100644 index 21aeca6..0000000 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_1e-05.json +++ /dev/null @@ -1 +0,0 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 303288, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 21.079484939575195, "TIME_S_1KI": 0.06950319478375404, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1922.1091361045835, "W": 78.6, "J_1KI": 6.337570679039671, "W_1KI": 0.2591596106670887, "W_D": 62.18274999999999, "J_D": 1520.636537953019, "W_D_1KI": 0.20502871857772148, "J_D_1KI": 0.0006760198839971298} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_1e-05.output deleted file mode 100644 index d8910fb..0000000 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_20_10_10_synthetic_30000_1e-05.output +++ /dev/null @@ -1,81 +0,0 @@ -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '30000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08539462089538574} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9000, 9000, 9000]), - col_indices=tensor([ 8168, 26166, 15021, ..., 3965, 14348, 3180]), - values=tensor([0.0414, 0.9204, 0.6909, ..., 0.5705, 0.2524, 0.4947]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.9721, 0.7014, 0.8881, ..., 0.4193, 0.5170, 0.9013]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 0.08539462089538574 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '245917', '-ss', '30000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 17.02755308151245} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9000, 9000, 9000]), - col_indices=tensor([ 9352, 11930, 17471, ..., 19597, 20552, 1111]), - values=tensor([0.4298, 0.4908, 0.5157, ..., 0.6454, 0.4570, 0.2738]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.3622, 0.2189, 0.3857, ..., 0.2935, 0.6447, 0.7890]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 17.02755308151245 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '303288', '-ss', '30000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [30000, 30000], "MATRIX_ROWS": 30000, "MATRIX_SIZE": 900000000, "MATRIX_NNZ": 9000, "MATRIX_DENSITY": 1e-05, "TIME_S": 21.079484939575195} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 8999, 9000, 9000]), - col_indices=tensor([20060, 11216, 16521, ..., 22127, 15786, 9820]), - values=tensor([0.3604, 0.7216, 0.9721, ..., 0.8443, 0.2707, 0.5761]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.4467, 0.9917, 0.2567, ..., 0.4911, 0.0150, 0.9779]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 21.079484939575195 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 8999, 9000, 9000]), - col_indices=tensor([20060, 11216, 16521, ..., 22127, 15786, 9820]), - values=tensor([0.3604, 0.7216, 0.9721, ..., 0.8443, 0.2707, 0.5761]), - size=(30000, 30000), nnz=9000, layout=torch.sparse_csr) -tensor([0.4467, 0.9917, 0.2567, ..., 0.4911, 0.0150, 0.9779]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([30000, 30000]) -Rows: 30000 -Size: 900000000 -NNZ: 9000 -Density: 1e-05 -Time: 21.079484939575195 seconds - -[18.69, 17.92, 17.95, 17.83, 18.17, 17.93, 17.96, 17.93, 18.28, 17.85] -[78.6] -24.454314708709717 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 303288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 9000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 21.079484939575195, 'TIME_S_1KI': 0.06950319478375404, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1922.1091361045835, 'W': 78.6} -[18.69, 17.92, 17.95, 17.83, 18.17, 17.93, 17.96, 17.93, 18.28, 17.85, 18.57, 17.96, 18.0, 18.17, 18.14, 18.05, 18.17, 20.58, 18.66, 18.18] -328.345 -16.417250000000003 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 303288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [30000, 30000], 'MATRIX_ROWS': 30000, 'MATRIX_SIZE': 900000000, 'MATRIX_NNZ': 9000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 21.079484939575195, 'TIME_S_1KI': 0.06950319478375404, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1922.1091361045835, 'W': 78.6, 'J_1KI': 6.337570679039671, 'W_1KI': 0.2591596106670887, 'W_D': 62.18274999999999, 'J_D': 1520.636537953019, 'W_D_1KI': 0.20502871857772148, 'J_D_1KI': 0.0006760198839971298} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json index 27d9946..59c1962 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 24.691206455230713, "TIME_S_1KI": 24.691206455230713, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 647.4072245025635, "W": 22.90104020202067, "J_1KI": 647.4072245025635, "W_1KI": 22.90104020202067, "W_D": 3.140040202020675, "J_D": 88.76822598814977, "W_D_1KI": 3.140040202020675, "J_D_1KI": 3.140040202020675} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 23.50609064102173, "TIME_S_1KI": 23.50609064102173, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 609.4492170715331, "W": 22.389903061636694, "J_1KI": 609.4492170715331, "W_1KI": 22.389903061636694, "W_D": 3.917903061636693, "J_D": 106.64463114929183, "W_D_1KI": 3.9179030616366926, "J_D_1KI": 3.9179030616366926} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output index d5446f7..81f61e7 100644 --- 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 24.691206455230713} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 23.50609064102173} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 21, ..., 999980, +tensor(crow_indices=tensor([ 0, 6, 16, ..., 999976, 999990, 1000000]), - col_indices=tensor([ 5106, 13656, 15471, ..., 68202, 79637, 95576]), - values=tensor([0.9862, 0.5796, 0.7870, ..., 0.3201, 0.7080, 0.2748]), + col_indices=tensor([35450, 44241, 45004, ..., 57756, 61659, 92730]), + values=tensor([0.6041, 0.1643, 0.4254, ..., 0.8911, 0.3600, 0.5834]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6105, 0.8083, 0.7150, ..., 0.7011, 0.0810, 0.6416]) +tensor([0.3461, 0.8147, 0.1835, ..., 0.7972, 0.9198, 0.6224]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,16 +16,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 24.691206455230713 seconds +Time: 23.50609064102173 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 21, ..., 999980, +tensor(crow_indices=tensor([ 0, 6, 16, ..., 999976, 999990, 1000000]), - col_indices=tensor([ 5106, 13656, 15471, ..., 68202, 79637, 95576]), - values=tensor([0.9862, 0.5796, 0.7870, ..., 0.3201, 0.7080, 0.2748]), + col_indices=tensor([35450, 44241, 45004, ..., 57756, 61659, 92730]), + values=tensor([0.6041, 0.1643, 0.4254, ..., 0.8911, 0.3600, 0.5834]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6105, 0.8083, 0.7150, ..., 0.7011, 0.0810, 0.6416]) +tensor([0.3461, 0.8147, 0.1835, ..., 0.7972, 0.9198, 0.6224]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -33,13 +33,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 24.691206455230713 seconds +Time: 23.50609064102173 seconds -[20.64, 20.52, 20.48, 20.68, 20.48, 20.48, 20.52, 20.48, 20.28, 20.36] -[20.64, 20.84, 21.24, 24.56, 26.2, 27.16, 28.16, 26.08, 25.68, 24.72, 24.72, 24.48, 24.6, 24.6, 24.72, 24.68, 24.6, 24.52, 24.52, 24.8, 24.72, 24.6, 24.48, 24.48, 24.52, 24.44, 24.64] -28.269773721694946 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 24.691206455230713, 'TIME_S_1KI': 24.691206455230713, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 647.4072245025635, 'W': 22.90104020202067} -[20.64, 20.52, 20.48, 20.68, 20.48, 20.48, 20.52, 20.48, 20.28, 20.36, 20.6, 20.56, 20.64, 22.68, 24.64, 25.4, 25.4, 25.36, 24.48, 22.68] -395.2199999999999 -19.760999999999996 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 24.691206455230713, 'TIME_S_1KI': 24.691206455230713, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 647.4072245025635, 'W': 22.90104020202067, 'J_1KI': 647.4072245025635, 'W_1KI': 22.90104020202067, 'W_D': 3.140040202020675, 'J_D': 88.76822598814977, 'W_D_1KI': 3.140040202020675, 'J_D_1KI': 3.140040202020675} +[20.48, 20.4, 20.48, 20.48, 20.28, 20.4, 20.28, 20.28, 20.4, 20.44] +[20.28, 20.36, 20.36, 21.44, 23.4, 25.2, 26.04, 26.28, 25.2, 24.36, 24.32, 24.36, 24.52, 24.6, 24.68, 24.76, 24.68, 24.48, 24.52, 24.52, 24.48, 24.76, 24.72, 24.72, 24.64, 24.88] +27.219823837280273 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 23.50609064102173, 'TIME_S_1KI': 23.50609064102173, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 609.4492170715331, 'W': 22.389903061636694} +[20.48, 20.4, 20.48, 20.48, 20.28, 20.4, 20.28, 20.28, 20.4, 20.44, 20.6, 20.64, 20.72, 21.0, 20.92, 20.76, 20.68, 20.44, 20.28, 20.48] +369.44000000000005 +18.472 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 23.50609064102173, 'TIME_S_1KI': 23.50609064102173, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 609.4492170715331, 'W': 22.389903061636694, 'J_1KI': 609.4492170715331, 'W_1KI': 
22.389903061636694, 'W_D': 3.917903061636693, 'J_D': 106.64463114929183, 'W_D_1KI': 3.9179030616366926, 'J_D_1KI': 3.9179030616366926} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..05631c7 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 227.44817399978638, "TIME_S_1KI": 227.44817399978638, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5651.356887254718, "W": 23.406634424755232, "J_1KI": 5651.356887254718, "W_1KI": 23.406634424755232, "W_D": 5.256634424755234, "J_D": 1269.1750817751913, "W_D_1KI": 5.256634424755234, "J_D_1KI": 5.256634424755234} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..4847746 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 227.44817399978638} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 105, 212, ..., 9999786, + 9999896, 10000000]), + col_indices=tensor([ 310, 1031, 2044, ..., 96924, 97369, 99264]), + values=tensor([0.4389, 0.5701, 0.8338, ..., 0.1266, 0.7107, 0.7989]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1044, 0.8564, 0.8953, ..., 0.3136, 0.0570, 0.9535]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 227.44817399978638 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
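
(For the synthetic runs, such as the new 100000 x 0.001 file here, the JSON fields are mutually consistent: MATRIX_NNZ = MATRIX_ROWS^2 * MATRIX_DENSITY, with 100000^2 * 0.001 = 10000000, and MATRIX_SIZE = MATRIX_ROWS^2. Only the matrix.to_sparse_csr().type(torch.float32) call at spmv.py:75 is visible in the logs; the COO construction around it below is an assumption, a minimal sketch of how such an input could be built.)

import torch

def synthetic_csr(size: int, density: float) -> torch.Tensor:
    nnz = int(round(size * size * density))     # 100000^2 * 0.001 -> 10000000
    indices = torch.randint(0, size, (2, nnz))  # random (row, col) positions
    values = torch.rand(nnz)                    # uniform [0, 1), like the dumps above
    coo = torch.sparse_coo_tensor(indices, values, (size, size))
    # The conversion below is the line confirmed by the UserWarning at
    # spmv.py:75; duplicate (row, col) pairs, ignored here, would coalesce
    # to slightly fewer stored values.
    return coo.to_sparse_csr().type(torch.float32)
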
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 105, 212, ..., 9999786, + 9999896, 10000000]), + col_indices=tensor([ 310, 1031, 2044, ..., 96924, 97369, 99264]), + values=tensor([0.4389, 0.5701, 0.8338, ..., 0.1266, 0.7107, 0.7989]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1044, 0.8564, 0.8953, ..., 0.3136, 0.0570, 0.9535]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 227.44817399978638 seconds + +[20.56, 20.48, 20.48, 20.36, 20.4, 20.32, 20.32, 19.96, 20.0, 20.04] +[20.12, 20.12, 20.4, 21.88, 23.4, 25.08, 27.48, 28.24, 27.96, 27.08, 26.24, 25.68, 24.72, 24.48, 24.4, 24.28, 24.36, 24.4, 24.48, 24.68, 24.72, 24.72, 24.8, 24.88, 24.76, 24.68, 24.64, 24.48, 24.44, 24.28, 24.44, 24.64, 24.76, 24.8, 24.84, 24.72, 24.44, 24.36, 24.32, 24.48, 24.48, 24.6, 24.76, 24.76, 24.68, 24.64, 24.72, 24.64, 24.4, 24.4, 24.68, 24.52, 24.6, 24.56, 24.48, 24.28, 24.32, 24.28, 24.32, 24.52, 24.52, 24.64, 24.76, 24.76, 24.68, 24.68, 24.6, 24.56, 24.72, 24.52, 24.76, 24.76, 24.68, 24.56, 24.48, 24.24, 24.4, 24.6, 24.76, 24.76, 24.8, 24.64, 24.64, 24.56, 24.76, 24.72, 24.8, 24.8, 24.8, 24.8, 24.6, 24.56, 24.44, 24.68, 24.72, 24.72, 24.68, 24.72, 24.68, 24.88, 24.92, 24.84, 24.8, 24.8, 25.0, 25.08, 25.0, 24.92, 24.8, 24.8, 24.6, 24.72, 24.84, 25.0, 25.0, 24.88, 24.96, 24.92, 24.96, 24.92, 24.92, 24.64, 24.56, 24.44, 24.44, 24.36, 24.6, 24.44, 24.52, 24.88, 25.12, 25.12, 25.2, 25.32, 24.96, 24.96, 24.8, 24.56, 24.64, 24.52, 24.44, 24.48, 24.4, 24.28, 24.56, 24.52, 24.48, 24.6, 24.52, 24.6, 24.64, 24.88, 24.8, 24.8, 24.76, 24.76, 24.76, 24.4, 24.28, 24.28, 24.28, 24.32, 24.8, 25.04, 24.92, 24.8, 24.8, 24.56, 24.52, 24.52, 24.48, 24.64, 24.52, 24.64, 24.68, 24.68, 24.72, 24.56, 24.56, 24.96, 24.96, 24.88, 24.72, 24.4, 24.32, 24.36, 24.4, 24.64, 24.92, 24.8, 24.76, 24.72, 24.64, 24.52, 24.8, 24.72, 24.76, 24.76, 24.72, 25.04, 24.96, 24.64, 24.32, 24.16, 24.12, 24.36, 24.44, 24.32, 24.16, 24.04, 24.32, 24.68, 24.56, 24.68, 24.72, 24.28, 24.4, 24.36, 24.36, 24.36, 24.44, 24.44, 24.16, 24.32, 24.44, 24.36, 24.6, 24.68, 24.8, 24.76] +241.44252371788025 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 227.44817399978638, 'TIME_S_1KI': 227.44817399978638, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5651.356887254718, 'W': 23.406634424755232} +[20.56, 20.48, 20.48, 20.36, 20.4, 20.32, 20.32, 19.96, 20.0, 20.04, 20.08, 20.32, 20.08, 20.0, 19.84, 19.84, 19.92, 20.08, 20.08, 20.36] +363.0 +18.15 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 227.44817399978638, 'TIME_S_1KI': 227.44817399978638, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5651.356887254718, 'W': 23.406634424755232, 'J_1KI': 5651.356887254718, 'W_1KI': 23.406634424755232, 'W_D': 5.256634424755234, 'J_D': 1269.1750817751913, 'W_D_1KI': 5.256634424755234, 'J_D_1KI': 5.256634424755234} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json index 75a4f33..4c6f614 
100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3170, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.487157583236694, "TIME_S_1KI": 3.3082516035446985, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.83638416290285, "W": 22.27932516765204, "J_1KI": 102.78750289050564, "W_1KI": 7.028178286325565, "W_D": 3.710325167652041, "J_D": 54.263714344978354, "W_D_1KI": 1.170449579700959, "J_D_1KI": 0.36922699675109116} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3195, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.793879747390747, "TIME_S_1KI": 3.3783661181191698, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 338.5230771541595, "W": 23.150052849773353, "J_1KI": 105.95401475873538, "W_1KI": 7.245712942026088, "W_D": 4.780052849773355, "J_D": 69.89868274450302, "W_D_1KI": 1.4961041783328186, "J_D_1KI": 0.46826421857052225} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output index 2086467..a9ff479 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.3119447231292725} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.2854795455932617} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99999, 99999, +tensor(crow_indices=tensor([ 0, 1, 3, ..., 100000, 100000, 100000]), - col_indices=tensor([34080, 20424, 38945, ..., 64155, 47978, 44736]), - values=tensor([0.5824, 0.7466, 0.8758, ..., 0.8278, 0.8938, 0.7712]), + col_indices=tensor([96494, 10713, 51050, ..., 77096, 58241, 39394]), + values=tensor([0.9472, 0.0468, 0.6571, ..., 0.2815, 0.5696, 0.0055]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9015, 0.6308, 0.7799, ..., 0.6045, 0.4908, 0.8218]) +tensor([0.9254, 0.6847, 0.8457, ..., 0.6275, 0.7476, 0.1010]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 3.3119447231292725 seconds +Time: 3.2854795455932617 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3170 -ss 100000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.487157583236694} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3195 -ss 100000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.793879747390747} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 99997, 99999, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99997, 100000, 100000]), - col_indices=tensor([62540, 50524, 43651, ..., 12394, 59846, 74659]), - values=tensor([0.6601, 0.8101, 0.4564, ..., 0.4320, 0.9061, 0.8749]), + col_indices=tensor([41532, 61839, 3968, ..., 19432, 54156, 77664]), + values=tensor([0.2018, 0.3494, 0.0819, ..., 0.4942, 0.5843, 0.6732]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2783, 0.8812, 0.6091, ..., 0.5557, 0.0745, 0.6879]) +tensor([0.4618, 0.8259, 0.6206, ..., 0.7480, 0.2960, 0.5870]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.487157583236694 seconds +Time: 10.793879747390747 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
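
(The derived JSON fields can be reproduced from the raw numbers printed at the end of each .output file. The sketch below was checked against the 3195-iteration run in this hunk: W = 338.5230771541595 / 14.62299370765686, baseline = 367.4 / 20, W_D = W - baseline, J_D = W_D * elapsed, and the *_1KI fields divide by ITERATIONS/1000. It is an after-the-fact reconstruction, not the harness's own code, and it leaves out J_D_1KI, whose formula is not obvious from these records.)

def derive(j, elapsed_s, baseline_sum, baseline_n, iterations, time_s):
    w = j / elapsed_s                       # 338.523... / 14.623... -> 23.1500...
    w_baseline = baseline_sum / baseline_n  # 367.4 / 20 -> 18.37 (idle power)
    w_d = w - w_baseline                    # -> 4.7800..., the 'W_D' field
    per_1ki = lambda x: x / (iterations / 1000.0)
    return {"W": w, "W_D": w_d, "J_D": w_d * elapsed_s,  # -> 69.898...
            "TIME_S_1KI": per_1ki(time_s),               # -> 3.3783...
            "J_1KI": per_1ki(j), "W_1KI": per_1ki(w),
            "W_D_1KI": per_1ki(w_d)}
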
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 99997, 99999, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99997, 100000, 100000]), - col_indices=tensor([62540, 50524, 43651, ..., 12394, 59846, 74659]), - values=tensor([0.6601, 0.8101, 0.4564, ..., 0.4320, 0.9061, 0.8749]), + col_indices=tensor([41532, 61839, 3968, ..., 19432, 54156, 77664]), + values=tensor([0.2018, 0.3494, 0.0819, ..., 0.4942, 0.5843, 0.6732]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2783, 0.8812, 0.6091, ..., 0.5557, 0.0745, 0.6879]) +tensor([0.4618, 0.8259, 0.6206, ..., 0.7480, 0.2960, 0.5870]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.487157583236694 seconds +Time: 10.793879747390747 seconds -[20.76, 20.76, 20.76, 20.8, 20.84, 20.72, 20.6, 20.36, 20.36, 20.32] -[20.32, 20.36, 20.48, 22.0, 23.24, 25.44, 26.04, 26.48, 26.08, 24.6, 24.44, 24.44, 24.4, 24.6] -14.625056266784668 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.487157583236694, 'TIME_S_1KI': 3.3082516035446985, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.83638416290285, 'W': 22.27932516765204} -[20.76, 20.76, 20.76, 20.8, 20.84, 20.72, 20.6, 20.36, 20.36, 20.32, 20.04, 20.16, 20.24, 20.6, 20.72, 20.72, 20.88, 20.72, 21.08, 21.0] -371.38 -18.569 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.487157583236694, 'TIME_S_1KI': 3.3082516035446985, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.83638416290285, 'W': 22.27932516765204, 'J_1KI': 102.78750289050564, 'W_1KI': 7.028178286325565, 'W_D': 3.710325167652041, 'J_D': 54.263714344978354, 'W_D_1KI': 1.170449579700959, 'J_D_1KI': 0.36922699675109116} +[20.88, 20.84, 20.52, 20.4, 20.32, 20.2, 20.2, 20.44, 20.48, 20.8] +[20.92, 20.8, 21.0, 22.96, 24.6, 25.56, 26.6, 27.0, 26.4, 25.96, 26.12, 26.12, 25.88, 25.88] +14.62299370765686 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.793879747390747, 'TIME_S_1KI': 3.3783661181191698, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 338.5230771541595, 'W': 23.150052849773353} +[20.88, 20.84, 20.52, 20.4, 20.32, 20.2, 20.2, 20.44, 20.48, 20.8, 20.4, 20.4, 20.2, 20.36, 20.52, 20.36, 20.4, 20.36, 20.2, 20.32] +367.4 +18.369999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.793879747390747, 'TIME_S_1KI': 3.3783661181191698, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 338.5230771541595, 'W': 23.150052849773353, 'J_1KI': 105.95401475873538, 'W_1KI': 7.245712942026088, 'W_D': 4.780052849773355, 'J_D': 69.89868274450302, 'W_D_1KI': 1.4961041783328186, 'J_D_1KI': 0.46826421857052225} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json index 25fcf70..8de09b8 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 32170, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.42804479598999, "TIME_S_1KI": 0.32415432999658034, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 291.8528582954407, "W": 21.456480473872652, "J_1KI": 9.07220572879828, "W_1KI": 0.666971727506144, "W_D": 3.1474804738726547, "J_D": 42.81229504752161, "W_D_1KI": 0.09783899514680307, "J_D_1KI": 0.0030413116303016183} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 32341, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.187894582748413, "TIME_S_1KI": 0.3150148289399961, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 322.80105960845947, "W": 22.074332714462717, "J_1KI": 9.981171256561623, "W_1KI": 0.6825494794367124, "W_D": 3.644332714462717, "J_D": 53.29241327524185, "W_D_1KI": 0.11268460203650836, "J_D_1KI": 0.0034842646187968327} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output index 034e4a3..a571cff 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3263826370239258} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3246574401855469} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 10000, 10000, 10000]), - col_indices=tensor([1982, 558, 3662, ..., 629, 5634, 6549]), - values=tensor([0.5250, 0.9307, 0.0448, ..., 0.0150, 0.4421, 0.4831]), +tensor(crow_indices=tensor([ 0, 1, 4, ..., 9996, 9998, 10000]), + col_indices=tensor([ 702, 590, 2393, ..., 5106, 4251, 5881]), + values=tensor([0.8131, 0.4443, 0.5032, ..., 0.0454, 0.7892, 0.7021]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5546, 0.0630, 0.8785, ..., 0.4779, 0.8090, 0.6189]) +tensor([0.5617, 0.3540, 0.6665, ..., 0.2887, 0.4752, 0.2274]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.3263826370239258 seconds +Time: 0.3246574401855469 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32170 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.42804479598999} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32341 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.187894582748413} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 9996, 9999, 10000]), - col_indices=tensor([8155, 9480, 4094, ..., 6796, 6921, 3902]), - values=tensor([0.0915, 0.3699, 0.5728, ..., 0.9057, 0.8661, 0.7356]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9996, 9998, 10000]), + col_indices=tensor([5513, 4819, 4488, ..., 7223, 1569, 1749]), + values=tensor([0.7502, 0.9864, 0.0219, ..., 0.7577, 0.3030, 0.6500]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.3327, 0.6532, 0.3155, ..., 0.1421, 0.0155, 0.6755]) +tensor([0.4735, 0.0629, 0.6403, ..., 0.2218, 0.6036, 0.6062]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,15 +34,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.42804479598999 seconds +Time: 10.187894582748413 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
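
(Each run prints the CSR matrix, then a dense random vector of matching length, then a total TIME_S; that is consistent with a loop of sparse matrix-vector products, though spmv.py's actual kernel is not shown anywhere in this diff. A hypothetical minimal version:)

import time
import torch

def time_spmv(matrix: torch.Tensor, iterations: int) -> float:
    x = torch.rand(matrix.shape[1])  # dense operand, like the vector printed above
    start = time.time()
    for _ in range(iterations):
        y = matrix @ x               # CSR x dense matrix-vector product
    return time.time() - start       # reported as TIME_S
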
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 9996, 9999, 10000]), - col_indices=tensor([8155, 9480, 4094, ..., 6796, 6921, 3902]), - values=tensor([0.0915, 0.3699, 0.5728, ..., 0.9057, 0.8661, 0.7356]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9996, 9998, 10000]), + col_indices=tensor([5513, 4819, 4488, ..., 7223, 1569, 1749]), + values=tensor([0.7502, 0.9864, 0.0219, ..., 0.7577, 0.3030, 0.6500]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.3327, 0.6532, 0.3155, ..., 0.1421, 0.0155, 0.6755]) +tensor([0.4735, 0.0629, 0.6403, ..., 0.2218, 0.6036, 0.6062]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -50,13 +50,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.42804479598999 seconds +Time: 10.187894582748413 seconds -[20.24, 20.16, 19.96, 20.2, 20.32, 20.28, 20.4, 20.32, 20.32, 20.36] -[20.36, 20.24, 20.48, 22.52, 23.04, 24.76, 25.6, 25.52, 24.28, 23.12, 23.12, 23.16, 23.44] -13.602084398269653 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.42804479598999, 'TIME_S_1KI': 0.32415432999658034, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 291.8528582954407, 'W': 21.456480473872652} -[20.24, 20.16, 19.96, 20.2, 20.32, 20.28, 20.4, 20.32, 20.32, 20.36, 20.52, 20.48, 20.64, 20.48, 20.48, 20.32, 20.48, 20.36, 20.32, 20.2] -366.17999999999995 -18.308999999999997 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.42804479598999, 'TIME_S_1KI': 0.32415432999658034, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 291.8528582954407, 'W': 21.456480473872652, 'J_1KI': 9.07220572879828, 'W_1KI': 0.666971727506144, 'W_D': 3.1474804738726547, 'J_D': 42.81229504752161, 'W_D_1KI': 0.09783899514680307, 'J_D_1KI': 0.0030413116303016183} +[20.16, 20.36, 20.44, 20.64, 20.64, 20.64, 20.68, 20.0, 19.96, 20.04] +[20.04, 20.36, 20.64, 22.2, 24.32, 25.36, 25.96, 26.0, 25.44, 24.12, 24.0, 23.8, 23.8, 23.84] +14.623366594314575 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32341, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.187894582748413, 'TIME_S_1KI': 0.3150148289399961, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.80105960845947, 'W': 22.074332714462717} +[20.16, 20.36, 20.44, 20.64, 20.64, 20.64, 20.68, 20.0, 19.96, 20.04, 19.96, 20.28, 20.36, 20.44, 20.48, 20.48, 20.72, 20.76, 20.96, 21.36] +368.6 +18.43 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32341, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.187894582748413, 'TIME_S_1KI': 0.3150148289399961, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.80105960845947, 'W': 22.074332714462717, 'J_1KI': 9.981171256561623, 'W_1KI': 0.6825494794367124, 'W_D': 3.644332714462717, 'J_D': 53.29241327524185, 'W_D_1KI': 0.11268460203650836, 'J_D_1KI': 0.0034842646187968327} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json index 6de6107..f0f5b59 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4747, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.586360931396484, "TIME_S_1KI": 2.2301160588574858, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 326.0044006347656, "W": 22.3162197944069, "J_1KI": 68.67587963656321, "W_1KI": 4.701120664505352, "W_D": 3.9862197944068996, "J_D": 58.23231742858882, "W_D_1KI": 0.8397345258914893, "J_D_1KI": 0.17689794099251935} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4681, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.577015399932861, "TIME_S_1KI": 2.2595632129743346, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 334.1403603744507, "W": 22.94669291080223, "J_1KI": 71.38226028080554, "W_1KI": 4.902092055287809, "W_D": 4.40869291080223, "J_D": 64.19758366584783, "W_D_1KI": 0.9418271546255567, "J_D_1KI": 0.20120212660234066} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output index 0876b19..7485b00 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.2116076946258545} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.242969274520874} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 19, ..., 99984, 99990, +tensor(crow_indices=tensor([ 0, 10, 21, ..., 99980, 99990, 100000]), - col_indices=tensor([ 365, 990, 1421, ..., 6204, 7506, 8345]), - values=tensor([0.4012, 0.2163, 0.0214, ..., 0.4427, 0.7190, 0.8381]), + col_indices=tensor([ 158, 243, 1021, ..., 9060, 9386, 9562]), + values=tensor([0.4026, 0.0672, 0.1618, ..., 0.9478, 0.4676, 0.6061]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6373, 0.6560, 0.2779, ..., 0.6662, 0.5919, 0.8676]) +tensor([0.1276, 0.9367, 0.3121, ..., 0.3681, 0.2222, 0.5819]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 2.2116076946258545 seconds +Time: 2.242969274520874 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4747 -ss 10000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.586360931396484} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4681 -ss 10000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.577015399932861} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 23, ..., 99976, 99989, +tensor(crow_indices=tensor([ 0, 13, 23, ..., 99978, 99989, 100000]), - col_indices=tensor([ 145, 447, 695, ..., 7955, 8009, 9128]), - values=tensor([0.3182, 0.0478, 0.7097, ..., 0.3986, 0.2793, 0.7202]), + col_indices=tensor([1463, 2229, 2458, ..., 6913, 8671, 9837]), + values=tensor([0.1583, 0.2191, 0.0082, ..., 0.3537, 0.5043, 0.1355]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.3264, 0.5290, 0.7390, ..., 0.4961, 0.6761, 0.4965]) +tensor([0.7982, 0.2389, 0.8535, ..., 0.4532, 0.2540, 0.6422]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.586360931396484 seconds +Time: 10.577015399932861 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
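
(Reading the tail of each .output: first come ten ~1 Hz power samples taken while idle, then the samples taken during the measured run, the run's wall time, a partial record ending at 'J' and 'W', a twenty-sample idle list whose first half repeats the pre-run ten, a baseline sum with its per-sample average, and finally the full record. The arithmetic below uses the new values from this hunk and is the part that checks out exactly; how the printed baseline sum is formed from the idle samples is not evident from the log itself.)

elapsed_s  = 14.561591148376465             # wall time of the measured run
w          = 334.1403603744507 / elapsed_s  # 'J' / elapsed -> 22.9466..., the 'W' field
w_baseline = 370.76 / 20                    # printed sum and average -> 18.538
w_d        = w - w_baseline                 # -> 4.4086..., the 'W_D' field
j_d        = w_d * elapsed_s                # -> 64.1975..., the 'J_D' field
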
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 23, ..., 99976, 99989, +tensor(crow_indices=tensor([ 0, 13, 23, ..., 99978, 99989, 100000]), - col_indices=tensor([ 145, 447, 695, ..., 7955, 8009, 9128]), - values=tensor([0.3182, 0.0478, 0.7097, ..., 0.3986, 0.2793, 0.7202]), + col_indices=tensor([1463, 2229, 2458, ..., 6913, 8671, 9837]), + values=tensor([0.1583, 0.2191, 0.0082, ..., 0.3537, 0.5043, 0.1355]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.3264, 0.5290, 0.7390, ..., 0.4961, 0.6761, 0.4965]) +tensor([0.7982, 0.2389, 0.8535, ..., 0.4532, 0.2540, 0.6422]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.586360931396484 seconds +Time: 10.577015399932861 seconds -[20.56, 20.48, 20.36, 20.48, 20.4, 20.2, 20.32, 20.48, 20.52, 20.6] -[20.44, 20.44, 20.44, 21.8, 24.32, 26.12, 27.12, 27.16, 25.36, 24.28, 24.24, 24.12, 23.96, 23.84] -14.608406066894531 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4747, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.586360931396484, 'TIME_S_1KI': 2.2301160588574858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.0044006347656, 'W': 22.3162197944069} -[20.56, 20.48, 20.36, 20.48, 20.4, 20.2, 20.32, 20.48, 20.52, 20.6, 20.28, 20.08, 20.4, 20.32, 20.2, 20.36, 20.36, 20.4, 20.28, 20.48] -366.6 -18.330000000000002 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4747, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.586360931396484, 'TIME_S_1KI': 2.2301160588574858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.0044006347656, 'W': 22.3162197944069, 'J_1KI': 68.67587963656321, 'W_1KI': 4.701120664505352, 'W_D': 3.9862197944068996, 'J_D': 58.23231742858882, 'W_D_1KI': 0.8397345258914893, 'J_D_1KI': 0.17689794099251935} +[20.84, 20.96, 20.84, 20.76, 20.88, 20.8, 20.72, 20.8, 20.8, 20.72] +[20.72, 21.0, 21.04, 25.64, 26.56, 27.96, 28.44, 25.92, 25.0, 24.0, 23.88, 24.12, 24.36, 24.36] +14.561591148376465 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.577015399932861, 'TIME_S_1KI': 2.2595632129743346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.1403603744507, 'W': 22.94669291080223} +[20.84, 20.96, 20.84, 20.76, 20.88, 20.8, 20.72, 20.8, 20.8, 20.72, 20.52, 20.24, 20.24, 20.2, 20.44, 20.16, 20.56, 20.64, 20.52, 20.32] +370.76 +18.538 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.577015399932861, 'TIME_S_1KI': 2.2595632129743346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.1403603744507, 'W': 22.94669291080223, 'J_1KI': 71.38226028080554, 'W_1KI': 4.902092055287809, 'W_D': 4.40869291080223, 'J_D': 64.19758366584783, 'W_D_1KI': 0.9418271546255567, 'J_D_1KI': 0.20120212660234066} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json index 9af62e4..7587c79 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.214847326278687, "TIME_S_1KI": 21.214847326278687, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 593.507265138626, "W": 22.813207511083125, "J_1KI": 593.507265138626, "W_1KI": 22.813207511083125, "W_D": 4.622207511083129, "J_D": 120.25111933398253, "W_D_1KI": 4.622207511083129, "J_D_1KI": 4.622207511083129} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.648348808288574, "TIME_S_1KI": 21.648348808288574, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 597.1462204551697, "W": 23.932854010192194, "J_1KI": 597.1462204551697, "W_1KI": 23.932854010192194, "W_D": 5.369854010192196, "J_D": 133.98268443942072, "W_D_1KI": 5.369854010192196, "J_D_1KI": 5.369854010192196} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output index a8afaf9..ed3ac8e 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.01 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.214847326278687} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.648348808288574} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 111, 190, ..., 999805, - 999902, 1000000]), - col_indices=tensor([ 3, 255, 407, ..., 9480, 9499, 9966]), - values=tensor([0.6179, 0.1045, 0.6429, ..., 0.5216, 0.7550, 0.7148]), +tensor(crow_indices=tensor([ 0, 93, 181, ..., 999807, + 999904, 1000000]), + col_indices=tensor([ 20, 39, 173, ..., 9424, 9617, 9690]), + values=tensor([0.7771, 0.0078, 0.5851, ..., 0.0250, 0.0076, 0.8688]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6572, 0.8503, 0.2699, ..., 0.6176, 0.8577, 0.2518]) +tensor([0.6163, 0.0977, 0.8617, ..., 0.7477, 0.6432, 0.7227]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 21.214847326278687 seconds +Time: 21.648348808288574 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 111, 190, ..., 999805, - 999902, 1000000]), - col_indices=tensor([ 3, 255, 407, ..., 9480, 9499, 9966]), - values=tensor([0.6179, 0.1045, 0.6429, ..., 0.5216, 0.7550, 0.7148]), +tensor(crow_indices=tensor([ 0, 93, 181, ..., 999807, + 999904, 1000000]), + col_indices=tensor([ 20, 39, 173, ..., 9424, 9617, 9690]), + values=tensor([0.7771, 0.0078, 0.5851, ..., 0.0250, 0.0076, 0.8688]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6572, 0.8503, 0.2699, ..., 0.6176, 0.8577, 0.2518]) +tensor([0.6163, 0.0977, 0.8617, ..., 0.7477, 0.6432, 0.7227]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 21.214847326278687 seconds +Time: 21.648348808288574 seconds -[20.56, 20.64, 20.64, 20.52, 20.36, 20.4, 19.92, 19.96, 20.0, 20.08] -[20.04, 20.08, 23.16, 25.4, 27.72, 28.8, 28.8, 29.48, 25.64, 25.4, 24.0, 23.96, 23.64, 23.72, 23.92, 24.04, 24.32, 24.36, 24.04, 24.0, 23.84, 24.08, 24.28, 24.28, 24.28] -26.015949964523315 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.214847326278687, 'TIME_S_1KI': 21.214847326278687, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 593.507265138626, 'W': 22.813207511083125} -[20.56, 20.64, 20.64, 20.52, 20.36, 20.4, 19.92, 19.96, 20.0, 20.08, 19.76, 19.76, 19.96, 20.28, 20.4, 20.36, 20.4, 20.0, 19.92, 20.2] -363.81999999999994 -18.190999999999995 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.214847326278687, 'TIME_S_1KI': 21.214847326278687, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 593.507265138626, 'W': 22.813207511083125, 'J_1KI': 593.507265138626, 'W_1KI': 22.813207511083125, 'W_D': 4.622207511083129, 'J_D': 120.25111933398253, 'W_D_1KI': 4.622207511083129, 'J_D_1KI': 4.622207511083129} +[20.48, 20.52, 20.6, 20.6, 20.6, 20.72, 20.48, 
20.56, 20.64, 20.72] +[20.72, 21.08, 23.92, 25.96, 27.92, 28.72, 29.32, 26.52, 26.52, 25.36, 24.24, 24.4, 24.24, 24.44, 24.16, 24.12, 24.16, 24.12, 24.16, 24.2, 24.32, 24.36, 24.68, 24.72] +24.95089888572693 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.648348808288574, 'TIME_S_1KI': 21.648348808288574, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 597.1462204551697, 'W': 23.932854010192194} +[20.48, 20.52, 20.6, 20.6, 20.6, 20.72, 20.48, 20.56, 20.64, 20.72, 20.68, 20.72, 20.52, 20.52, 20.64, 20.76, 20.64, 20.8, 20.68, 20.64] +371.26 +18.563 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.648348808288574, 'TIME_S_1KI': 21.648348808288574, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 597.1462204551697, 'W': 23.932854010192194, 'J_1KI': 597.1462204551697, 'W_1KI': 23.932854010192194, 'W_D': 5.369854010192196, 'J_D': 133.98268443942072, 'W_D_1KI': 5.369854010192196, 'J_D_1KI': 5.369854010192196} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json index 4f5ecb3..e29f054 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.68757820129395, "TIME_S_1KI": 106.68757820129395, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2614.243714923859, "W": 23.06903562044379, "J_1KI": 2614.243714923859, "W_1KI": 23.06903562044379, "W_D": 4.456035620443789, "J_D": 504.9696617529395, "W_D_1KI": 4.456035620443789, "J_D_1KI": 4.456035620443789} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.87029075622559, "TIME_S_1KI": 106.87029075622559, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2633.659623832703, "W": 23.232350154828893, "J_1KI": 2633.659623832703, "W_1KI": 23.232350154828893, "W_D": 4.519350154828892, "J_D": 512.3213944957257, "W_D_1KI": 4.519350154828892, "J_D_1KI": 4.519350154828892} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output index 4e8e799..5edda98 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 
5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.68757820129395} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.87029075622559} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 529, 1008, ..., 4999026, - 4999478, 5000000]), - col_indices=tensor([ 75, 122, 128, ..., 9908, 9909, 9916]), - values=tensor([0.8571, 0.2596, 0.0411, ..., 0.7048, 0.9398, 0.3732]), +tensor(crow_indices=tensor([ 0, 497, 970, ..., 4998958, + 4999495, 5000000]), + col_indices=tensor([ 3, 19, 30, ..., 9933, 9939, 9986]), + values=tensor([0.6521, 0.8632, 0.3100, ..., 0.6388, 0.4505, 0.0265]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.2354, 0.1436, 0.6485, ..., 0.5167, 0.9065, 0.2719]) +tensor([0.1776, 0.4739, 0.9893, ..., 0.4929, 0.9525, 0.7109]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 106.68757820129395 seconds +Time: 106.87029075622559 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 529, 1008, ..., 4999026, - 4999478, 5000000]), - col_indices=tensor([ 75, 122, 128, ..., 9908, 9909, 9916]), - values=tensor([0.8571, 0.2596, 0.0411, ..., 0.7048, 0.9398, 0.3732]), +tensor(crow_indices=tensor([ 0, 497, 970, ..., 4998958, + 4999495, 5000000]), + col_indices=tensor([ 3, 19, 30, ..., 9933, 9939, 9986]), + values=tensor([0.6521, 0.8632, 0.3100, ..., 0.6388, 0.4505, 0.0265]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.2354, 0.1436, 0.6485, ..., 0.5167, 0.9065, 0.2719]) +tensor([0.1776, 0.4739, 0.9893, ..., 0.4929, 0.9525, 0.7109]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 106.68757820129395 seconds +Time: 106.87029075622559 seconds -[20.52, 20.6, 20.8, 20.84, 20.8, 20.6, 20.68, 20.68, 20.4, 20.56] -[20.56, 20.64, 20.92, 22.0, 23.8, 25.56, 26.6, 26.56, 26.4, 25.36, 24.52, 24.6, 24.56, 24.6, 24.32, 24.4, 24.4, 24.32, 24.24, 24.44, 24.6, 24.36, 24.32, 24.36, 24.48, 24.6, 24.76, 24.72, 24.64, 24.6, 24.48, 24.56, 24.64, 24.64, 24.44, 24.48, 24.36, 24.16, 24.16, 24.24, 24.28, 24.16, 24.2, 24.36, 24.44, 24.44, 24.32, 24.0, 24.0, 24.0, 24.28, 24.44, 24.56, 24.48, 24.48, 24.32, 24.52, 24.52, 24.36, 24.4, 24.4, 24.32, 24.36, 24.32, 24.68, 24.72, 24.6, 24.6, 24.64, 24.6, 24.72, 24.64, 24.64, 24.68, 24.68, 24.52, 24.4, 24.32, 24.2, 24.16, 24.24, 24.2, 24.2, 24.4, 24.52, 24.56, 24.8, 24.8, 24.56, 24.44, 24.4, 23.84, 23.76, 23.88, 24.0, 24.0, 24.16, 24.2, 24.36, 24.2, 24.16, 24.2, 24.24, 24.16, 24.16, 24.4, 24.32, 24.56] -113.32262682914734 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.68757820129395, 'TIME_S_1KI': 106.68757820129395, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2614.243714923859, 'W': 23.06903562044379} -[20.52, 20.6, 20.8, 20.84, 20.8, 20.6, 20.68, 20.68, 20.4, 20.56, 20.4, 20.52, 20.72, 20.84, 20.96, 20.84, 20.8, 20.52, 20.64, 20.56] -372.26 -18.613 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.68757820129395, 'TIME_S_1KI': 106.68757820129395, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2614.243714923859, 'W': 23.06903562044379, 'J_1KI': 2614.243714923859, 'W_1KI': 23.06903562044379, 'W_D': 4.456035620443789, 'J_D': 504.9696617529395, 'W_D_1KI': 4.456035620443789, 'J_D_1KI': 4.456035620443789} +[20.76, 20.72, 20.52, 20.48, 20.68, 20.6, 20.6, 20.56, 20.68, 20.6] +[20.64, 20.72, 20.72, 24.72, 25.96, 28.32, 29.48, 30.12, 26.68, 25.6, 25.08, 24.6, 24.6, 24.6, 24.8, 24.8, 24.88, 24.92, 24.84, 24.8, 24.72, 24.52, 24.52, 24.52, 24.6, 24.56, 24.4, 24.48, 24.32, 24.16, 24.28, 24.36, 24.48, 24.64, 24.68, 24.64, 24.4, 24.68, 24.72, 24.72, 24.56, 24.64, 24.48, 24.32, 24.12, 24.12, 24.2, 24.52, 24.4, 24.56, 24.68, 24.48, 24.28, 24.24, 24.2, 24.04, 23.92, 24.04, 24.28, 24.12, 24.28, 24.36, 24.28, 24.44, 24.52, 24.6, 24.72, 24.72, 24.88, 24.84, 24.72, 24.44, 24.16, 24.2, 24.0, 24.2, 24.44, 24.32, 24.2, 24.2, 24.16, 24.12, 24.24, 24.2, 24.12, 24.16, 24.2, 24.16, 24.4, 24.4, 24.36, 24.2, 24.28, 24.52, 24.12, 24.36, 24.64, 24.6, 24.6, 24.52, 24.48, 24.2, 24.4, 
24.4, 24.4, 24.52, 24.4, 24.16] +113.36173939704895 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.87029075622559, 'TIME_S_1KI': 106.87029075622559, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2633.659623832703, 'W': 23.232350154828893} +[20.76, 20.72, 20.52, 20.48, 20.68, 20.6, 20.6, 20.56, 20.68, 20.6, 20.52, 20.76, 20.76, 21.2, 21.2, 21.28, 21.12, 21.0, 20.88, 20.56] +374.26 +18.713 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.87029075622559, 'TIME_S_1KI': 106.87029075622559, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2633.659623832703, 'W': 23.232350154828893, 'J_1KI': 2633.659623832703, 'W_1KI': 23.232350154828893, 'W_D': 4.519350154828892, 'J_D': 512.3213944957257, 'W_D_1KI': 4.519350154828892, 'J_D_1KI': 4.519350154828892} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..8cd094e --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 210.98000812530518, "TIME_S_1KI": 210.98000812530518, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5224.394376831054, "W": 23.586192508859664, "J_1KI": 5224.394376831054, "W_1KI": 23.586192508859664, "W_D": 5.122192508859662, "J_D": 1134.5770933685287, "W_D_1KI": 5.122192508859662, "J_D_1KI": 5.122192508859662} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..d1e3dfe --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 210.98000812530518} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 993, 1975, ..., 9997956, + 9998997, 10000000]), + col_indices=tensor([ 19, 22, 26, ..., 9979, 9989, 9990]), + values=tensor([0.9746, 0.4059, 0.0503, ..., 0.3598, 0.3506, 0.0768]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8784, 0.5931, 0.4456, ..., 0.6081, 0.2914, 0.4121]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 210.98000812530518 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 993, 1975, ..., 9997956, + 9998997, 10000000]), + col_indices=tensor([ 19, 22, 26, ..., 9979, 9989, 9990]), + values=tensor([0.9746, 0.4059, 0.0503, ..., 0.3598, 0.3506, 0.0768]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8784, 0.5931, 0.4456, ..., 0.6081, 0.2914, 0.4121]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 210.98000812530518 seconds + +[20.72, 20.64, 20.6, 20.6, 20.48, 20.44, 20.4, 20.36, 20.28, 20.28] +[20.2, 20.36, 21.16, 23.28, 25.44, 27.76, 29.48, 29.48, 28.08, 28.08, 26.32, 25.08, 24.24, 24.16, 24.44, 24.76, 24.64, 24.68, 24.72, 24.64, 24.56, 24.64, 24.68, 24.64, 24.76, 24.76, 24.64, 24.56, 24.52, 24.48, 24.6, 24.6, 24.64, 24.96, 24.88, 25.04, 24.84, 24.68, 24.6, 24.48, 24.64, 24.52, 24.56, 24.4, 24.6, 24.6, 24.68, 24.84, 25.0, 24.68, 24.68, 24.68, 24.68, 24.68, 24.92, 24.68, 24.96, 25.12, 24.88, 24.8, 24.92, 24.72, 24.6, 24.64, 24.64, 24.96, 25.12, 25.0, 24.92, 24.88, 24.6, 24.48, 24.32, 24.48, 24.52, 24.52, 24.6, 24.76, 24.84, 24.76, 25.0, 24.72, 24.6, 24.92, 24.88, 24.88, 24.84, 24.92, 25.12, 25.2, 25.2, 25.12, 24.96, 24.52, 24.52, 24.32, 24.4, 24.4, 24.48, 24.36, 24.32, 24.28, 24.2, 24.16, 24.0, 24.08, 24.32, 24.36, 24.88, 25.12, 25.12, 25.08, 24.76, 25.0, 25.2, 24.84, 25.24, 25.16, 24.96, 24.96, 25.08, 25.24, 25.04, 25.12, 25.24, 25.16, 25.12, 25.24, 25.44, 25.64, 25.68, 25.44, 26.16, 26.24, 26.0, 26.24, 26.4, 25.56, 25.68, 25.56, 25.4, 25.4, 25.32, 25.24, 25.4, 25.6, 25.36, 25.16, 24.84, 24.52, 24.4, 24.24, 24.44, 24.48, 24.4, 24.56, 24.36, 24.24, 24.24, 24.44, 24.52, 24.68, 24.72, 24.72, 24.88, 24.76, 24.64, 24.36, 24.68, 24.84, 24.6, 24.84, 24.56, 24.28, 24.36, 24.52, 24.32, 24.4, 24.36, 24.4, 24.44, 24.44, 24.72, 24.64, 24.76, 24.76, 24.64, 24.52, 24.76, 24.68, 24.56, 24.72, 24.36, 24.44, 24.48, 24.88, 24.88, 25.0, 25.0, 24.68, 24.4, 24.44, 24.52, 24.36, 24.6, 24.52, 24.56, 24.56, 24.56, 24.64, 24.32] +221.50223588943481 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 210.98000812530518, 'TIME_S_1KI': 210.98000812530518, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5224.394376831054, 'W': 23.586192508859664} +[20.72, 20.64, 20.6, 20.6, 20.48, 20.44, 20.4, 20.36, 20.28, 20.28, 20.24, 20.44, 20.68, 20.92, 21.04, 20.8, 20.44, 20.28, 20.2, 
20.12] +369.28000000000003 +18.464000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 210.98000812530518, 'TIME_S_1KI': 210.98000812530518, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5224.394376831054, 'W': 23.586192508859664, 'J_1KI': 5224.394376831054, 'W_1KI': 23.586192508859664, 'W_D': 5.122192508859662, 'J_D': 1134.5770933685287, 'W_D_1KI': 5.122192508859662, 'J_D_1KI': 5.122192508859662} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json index 69bee1d..e2472aa 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 145400, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.376285076141357, "TIME_S_1KI": 0.07136372129395707, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 324.9616888427734, "W": 22.159348523127505, "J_1KI": 2.2349497169379187, "W_1KI": 0.15240267209853856, "W_D": 3.711348523127505, "J_D": 54.42606233215331, "W_D_1KI": 0.02552509300637899, "J_D_1KI": 0.00017555084598610036} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 142368, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.641618490219116, "TIME_S_1KI": 0.07474726406368788, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 348.11982940673823, "W": 23.739150118754246, "J_1KI": 2.445211209026876, "W_1KI": 0.16674498566218704, "W_D": 4.927150118754245, "J_D": 72.25358322525018, "W_D_1KI": 0.03460855050821986, "J_D_1KI": 0.00024309220125463487} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output index d251499..4bc129d 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,266 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08243966102600098} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0838630199432373} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 252, 7839, 5648, 3465, 7349, 4902, 9434, 7529, 7692, - 165, 3611, 104, 550, 6486, 7084, 9069, 7958, 6919, - 690, 9278, 3067, 6601, 7528, 1640, 3373, 4102, 2924, - 2640, 1739, 407, 8622, 7009, 7252, 6788, 1851, 3757, - 6304, 8203, 5332, 7635, 594, 3806, 4878, 4044, 1441, - 999, 1148, 5958, 9975, 4945, 2434, 1204, 59, 181, - 7425, 800, 8678, 5796, 5760, 120, 6846, 442, 3920, - 1463, 5374, 6614, 1071, 5654, 6755, 4329, 2096, 3557, - 3459, 2406, 5557, 9403, 8210, 6660, 740, 4513, 3423, - 2395, 8647, 3341, 136, 1978, 4301, 975, 3977, 9483, - 1644, 1238, 3590, 4407, 378, 953, 4885, 3832, 7590, - 727, 9280, 2092, 6016, 2681, 4198, 2877, 6915, 4242, - 6915, 8581, 5016, 2122, 9650, 9146, 4295, 9411, 1035, - 3607, 4089, 1201, 5045, 5545, 7311, 3130, 7563, 2568, - 6650, 8830, 9967, 763, 8604, 7974, 6093, 2055, 9735, - 2084, 6764, 9924, 9982, 8233, 9788, 2760, 7451, 647, - 9876, 3730, 1454, 7105, 9740, 3, 6735, 3817, 6148, - 2672, 8936, 3502, 36, 122, 8671, 6286, 16, 4468, - 7863, 6117, 5323, 3322, 1830, 4682, 2100, 8360, 6810, - 1598, 8824, 932, 5248, 3917, 7002, 3906, 3017, 2692, - 1181, 3736, 4511, 4850, 7042, 514, 3936, 2631, 7634, - 8605, 7530, 2136, 1830, 5351, 6593, 8222, 4992, 702, - 8215, 7622, 3843, 1766, 8771, 4771, 6546, 8907, 5810, - 4223, 4783, 1749, 808, 748, 8530, 510, 4005, 9341, - 9392, 5211, 8047, 1297, 1483, 2102, 9250, 9844, 5843, - 7781, 5823, 5125, 7934, 6365, 4344, 2486, 5379, 5512, - 1500, 5968, 9635, 2436, 343, 691, 5998, 6974, 5014, - 8797, 2209, 662, 5712, 468, 4740, 3465, 7884, 1157, - 5482, 4513, 3540, 1871, 3557, 4818, 294, 9373, 9392, - 6804, 446, 4018, 9572, 2746, 8821, 3101, 5524, 4011, - 6392, 4612, 6933, 5523, 6755, 5391, 9534, 6269, 2247, - 26, 3621, 8701, 6455, 4517, 2157, 6077, 8811, 8299, - 1793, 7712, 8935, 4953, 4593, 2233, 1763, 2638, 6120, - 3919, 6685, 8170, 8912, 8472, 2086, 4535, 5619, 680, - 4842, 7676, 6910, 6323, 7473, 2912, 6983, 1773, 3647, - 1488, 4405, 9243, 4961, 5147, 4030, 6029, 7737, 9786, - 8179, 920, 4796, 5800, 5579, 8198, 6536, 9822, 7168, - 4169, 9541, 5653, 1326, 4959, 4059, 87, 6667, 6591, - 1202, 4268, 583, 2119, 8178, 8386, 2346, 9152, 480, - 8393, 1347, 969, 5740, 199, 2706, 9179, 9925, 1569, - 3911, 617, 7052, 7551, 8066, 3096, 422, 6701, 9877, - 1422, 9102, 806, 5587, 9274, 2597, 1463, 9111, 4690, - 6223, 1110, 1661, 1854, 9509, 4300, 4820, 680, 8309, - 83, 406, 145, 2909, 4808, 5417, 5546, 8440, 6714, - 8204, 2476, 3404, 3107, 4637, 7287, 2700, 6764, 9687, - 3766, 7158, 1579, 1642, 2888, 9988, 8451, 7261, 7908, - 9760, 6495, 868, 8055, 2229, 6718, 6854, 7101, 3460, - 1480, 5908, 1710, 1311, 7394, 1535, 5767, 9382, 6035, - 6352, 9940, 3068, 1099, 9199, 844, 2345, 7539, 5311, - 1344, 795, 1299, 1427, 2359, 6210, 1940, 4014, 9343, - 6807, 601, 9305, 6485, 4671, 479, 9989, 6498, 8791, - 4029, 1185, 2150, 1611, 9247, 1386, 6282, 1198, 8207, - 168, 9966, 8935, 7136, 2956, 7945, 3135, 1338, 8120, - 8911, 7324, 7616, 9525, 1089, 2535, 5885, 6794, 4177, - 1549, 6210, 3390, 6804, 2877, 4943, 9928, 8223, 8906, - 8888, 3459, 625, 8152, 4970, 6566, 1431, 3558, 5909, - 4644, 5732, 6646, 2764, 9269, 7042, 4735, 8837, 8508, - 4960, 8021, 8758, 717, 7061, 7250, 2575, 3253, 7578, - 8526, 5442, 8779, 1392, 7075, 7474, 5206, 1365, 4114, - 6910, 8849, 4615, 1920, 8635, 4916, 8961, 314, 6483, - 8460, 8661, 3346, 5713, 2155, 4770, 8480, 6079, 1859, - 4905, 7013, 9809, 7525, 6366, 5580, 800, 3941, 6983, - 5992, 823, 
5419, 6585, 5265, 7523, 1529, 1063, 1845, - 508, 440, 3534, 6337, 4197, 3477, 4822, 3503, 5247, - 8192, 1821, 6846, 6103, 7202, 2324, 6837, 3842, 2645, - 5069, 6889, 9598, 2706, 2071, 6669, 5766, 9229, 442, - 2610, 8285, 6236, 5573, 3986, 1231, 4409, 7210, 1785, - 8842, 2784, 8116, 2335, 2665, 4250, 4511, 4655, 5687, - 2034, 7694, 1632, 1903, 2798, 9449, 32, 4346, 3152, - 401, 3235, 6393, 9087, 3028, 428, 8951, 2121, 1980, - 6229, 2463, 8133, 2257, 9417, 6518, 5725, 8271, 9273, - 5432, 3877, 5458, 5090, 6954, 5520, 8694, 6001, 9990, - 3826, 5607, 8624, 5287, 4743, 4098, 5108, 7594, 8810, - 7198, 7629, 7936, 58, 7202, 1568, 4531, 5153, 7169, - 2986, 9248, 8747, 5959, 7564, 3379, 2192, 8481, 333, - 8571, 8198, 2115, 2561, 795, 8427, 7486, 6933, 7082, - 2006, 9398, 6619, 3063, 4813, 8722, 4295, 3600, 7348, - 9286, 2123, 6647, 6207, 8996, 801, 2030, 925, 5992, - 9553, 3423, 9026, 8, 7802, 2458, 9300, 6765, 5796, - 8636, 5723, 7062, 8809, 1599, 598, 8436, 5782, 965, - 461, 5948, 2047, 2459, 5968, 3285, 1989, 9399, 2683, - 9902, 9444, 7797, 9034, 8461, 8558, 7258, 6644, 1630, - 246, 6871, 5933, 5087, 8762, 4315, 5997, 4386, 2172, - 4536, 9311, 9832, 7624, 2977, 3946, 6388, 790, 6935, - 9174, 9391, 4050, 2698, 7708, 7922, 275, 6423, 3266, - 5292, 1182, 8926, 8600, 9081, 5546, 4427, 1370, 6470, - 4267, 4145, 5893, 7704, 6234, 4555, 3422, 8549, 5286, - 7269, 594, 9099, 6281, 2857, 9220, 3671, 9897, 2948, - 4497, 3803, 6350, 7674, 1594, 7639, 2889, 6927, 5836, - 6736, 7455, 916, 7481, 4994, 6720, 6379, 3857, 4222, - 519, 4570, 3697, 4849, 1787, 5768, 9060, 916, 12, - 3578, 2283, 6099, 2525, 10, 5457, 5555, 4678, 2627, - 6632, 2690, 5114, 6385, 338, 4606, 6990, 9683, 4269, - 7335, 5251, 8029, 5687, 4726, 7669, 6989, 2867, 890, - 5989, 7548, 1788, 2014, 8701, 659, 4759, 5429, 6064, - 9106, 1014, 2757, 9766, 8207, 1927, 2784, 6272, 6801, - 3107, 4185, 7396, 6416, 2895, 6501, 1862, 6835, 3353, - 7285, 8443, 5937, 3968, 9162, 903, 8575, 1535, 5003, - 7432, 4678, 2158, 3254, 8555, 4467, 3658, 4909, 8302, - 967, 6993, 2364, 1980, 7994, 2960, 1862, 5699, 9380, - 7802, 836, 6809, 8323, 7040, 1082, 6752, 8264, 6441, - 4214, 7290, 3066, 3737, 1923, 7871, 5838, 2703, 1797, - 8902, 2887, 1148, 4435, 6935, 9604, 7491, 7918, 1891, - 5769, 5780, 9339, 4089, 8820, 8455, 2176, 5092, 7496, - 9986, 7792, 7515, 9400, 3576, 1656, 1034, 4984, 4318, - 516, 1104, 6615, 3729, 9239, 4176, 5109, 4523, 5423, - 4310, 8446, 8954, 6305, 3579, 5848, 5971, 1235, 7513, - 6017, 3772, 197, 9255, 5929, 5514, 5490, 1717, 1881, - 4208, 9867, 7526, 7915, 6579, 5417, 7580, 5540, 7660, - 7530, 7142, 638, 4444, 7450, 5581, 2950, 9703, 1035, - 1395, 2126, 2348, 2840, 2560, 3902, 1786, 6271, 3582, - 5179]), - values=tensor([1.1398e-01, 4.5701e-01, 9.1677e-01, 7.6676e-01, - 4.5397e-01, 7.8607e-01, 6.7699e-01, 4.4824e-01, - 1.1089e-01, 2.2162e-04, 1.7425e-01, 3.1837e-01, - 8.5822e-03, 4.7768e-01, 1.5387e-01, 3.0223e-01, - 4.0371e-01, 4.4123e-01, 7.7609e-01, 6.4492e-01, - 8.2429e-01, 8.3313e-01, 9.8148e-01, 9.1538e-01, - 4.9730e-01, 8.5059e-01, 6.1877e-01, 5.1939e-01, - 5.5798e-01, 5.0091e-01, 4.1061e-01, 9.2937e-01, - 9.5609e-01, 8.7103e-02, 5.0227e-01, 1.0999e-01, - 6.5964e-02, 4.7419e-01, 2.0885e-01, 9.7682e-01, - 3.1682e-01, 5.3352e-01, 1.7838e-01, 3.6944e-01, - 7.7856e-01, 8.1945e-02, 3.2643e-01, 7.1965e-01, - 9.2732e-01, 6.5053e-01, 3.1863e-01, 8.4939e-02, - 3.8327e-02, 9.1600e-01, 4.6334e-01, 2.7798e-03, - 5.5788e-01, 4.2202e-01, 9.7366e-01, 7.3529e-01, - 6.0986e-01, 9.9120e-01, 5.4564e-01, 4.6692e-01, - 7.5075e-01, 9.9939e-01, 1.7701e-01, 
1.7010e-01, - 7.2917e-01, 1.0548e-01, 2.3979e-01, 4.0362e-01, - 2.2728e-02, 7.9852e-01, 5.9097e-01, 8.4211e-01, - 2.7317e-01, 7.5793e-01, 4.4763e-01, 8.9137e-01, - 2.1648e-01, 5.3602e-01, 2.5258e-01, 2.7488e-01, - 5.8449e-01, 2.3385e-01, 1.6282e-01, 1.2994e-02, - 6.0808e-01, 7.2198e-01, 4.5666e-02, 4.8221e-01, - 5.3837e-01, 2.4284e-01, 8.5430e-01, 5.3583e-01, - 6.6264e-01, 3.5052e-02, 5.8137e-01, 8.9660e-01, - 9.7719e-01, 5.8996e-01, 8.2914e-01, 2.1977e-01, - 8.9453e-01, 9.1374e-01, 9.3604e-01, 5.8379e-01, - 4.6967e-01, 4.0322e-02, 2.7081e-01, 3.6179e-01, - 8.6877e-01, 2.3540e-01, 9.2958e-01, 8.0094e-01, - 5.1751e-01, 2.4405e-01, 7.8634e-01, 1.0417e-01, - 2.1528e-01, 1.4438e-01, 4.6555e-01, 9.7393e-01, - 9.7512e-01, 7.2696e-01, 4.1536e-01, 8.0871e-01, - 5.9536e-01, 5.9568e-01, 5.0752e-01, 7.5060e-01, - 4.8779e-01, 9.9115e-02, 6.4193e-01, 9.6880e-01, - 9.9615e-01, 9.1596e-01, 7.5914e-01, 5.4084e-02, - 5.5377e-02, 8.6618e-01, 8.1098e-01, 1.5683e-01, - 4.5166e-01, 7.1720e-01, 7.4074e-01, 2.2057e-01, - 7.7304e-01, 3.7224e-01, 5.8425e-01, 4.2210e-01, - 8.8491e-01, 3.7541e-01, 8.1284e-01, 7.6339e-01, - 9.0975e-02, 1.3359e-01, 3.5264e-02, 3.2247e-01, - 3.8566e-01, 3.6076e-01, 7.6483e-03, 3.9528e-01, - 3.2447e-01, 2.0965e-01, 2.2797e-01, 3.4802e-01, - 4.7149e-01, 8.4883e-02, 5.9693e-01, 9.5199e-01, - 8.0506e-01, 5.7482e-01, 8.5519e-01, 3.1827e-01, - 6.4714e-01, 6.1169e-01, 2.0934e-01, 4.1690e-01, - 3.6825e-02, 3.7347e-01, 8.3016e-01, 8.6529e-01, - 2.7912e-01, 1.8435e-01, 8.1002e-01, 7.0997e-01, - 1.9340e-01, 3.6621e-01, 1.5457e-01, 2.3361e-01, - 7.1600e-01, 6.7596e-01, 3.5321e-01, 4.1996e-01, - 2.8372e-01, 7.9354e-01, 5.6871e-01, 4.6209e-02, - 8.5928e-01, 4.4915e-01, 4.1458e-01, 8.0500e-01, - 3.8968e-01, 7.7263e-01, 2.4845e-01, 4.2430e-01, - 8.2861e-01, 3.6574e-01, 2.3914e-01, 8.8428e-01, - 1.0700e-02, 3.9405e-01, 3.1077e-02, 6.1753e-01, - 7.6090e-01, 1.4204e-01, 9.8571e-02, 4.7776e-01, - 7.5835e-01, 7.4891e-01, 4.1702e-01, 7.4036e-01, - 5.1610e-01, 1.5415e-01, 3.2752e-01, 8.2579e-01, - 9.7355e-01, 2.4171e-01, 4.3604e-01, 1.4826e-01, - 7.7730e-01, 3.6806e-01, 6.6672e-01, 4.8489e-01, - 1.4653e-01, 7.8715e-01, 9.0612e-01, 5.3896e-01, - 1.4912e-01, 3.6756e-01, 5.8564e-01, 9.7277e-01, - 9.1323e-02, 2.5754e-01, 8.1067e-01, 8.7861e-01, - 4.2476e-02, 4.0739e-02, 6.4508e-01, 6.3302e-01, - 4.1919e-01, 3.4137e-01, 7.4183e-01, 9.2600e-01, - 7.6754e-01, 6.4006e-01, 6.5888e-01, 8.2627e-01, - 3.6192e-01, 8.3794e-01, 5.9752e-01, 4.6776e-01, - 2.7883e-01, 6.3075e-01, 9.7661e-01, 7.6649e-01, - 5.8900e-01, 6.0578e-01, 7.3006e-01, 3.4738e-01, - 8.9139e-01, 6.6618e-01, 2.3515e-01, 1.8818e-01, - 1.9701e-02, 4.7496e-01, 4.7267e-02, 5.7144e-01, - 2.3268e-01, 5.8053e-01, 5.6927e-01, 1.4274e-01, - 2.1320e-04, 2.2279e-01, 5.0075e-02, 8.0242e-01, - 6.9103e-01, 3.7218e-01, 5.5271e-01, 6.1699e-01, - 8.4592e-01, 3.7016e-01, 2.0105e-01, 4.7011e-01, - 3.7640e-01, 4.1230e-01, 7.0537e-01, 1.2735e-01, - 4.3863e-01, 7.4747e-01, 3.4724e-01, 9.8014e-01, - 2.5834e-01, 3.9635e-01, 4.5503e-01, 3.9827e-01, - 3.0887e-02, 2.2023e-01, 6.9080e-01, 7.6920e-01, - 5.8589e-01, 6.3230e-01, 4.2782e-01, 4.7725e-02, - 7.1669e-01, 6.7426e-01, 2.9678e-01, 7.0318e-01, - 7.7099e-01, 5.6702e-01, 1.8709e-01, 7.4465e-01, - 5.9567e-01, 9.9705e-01, 9.1906e-01, 8.8980e-01, - 9.2834e-01, 4.7402e-02, 8.5015e-01, 8.6636e-01, - 1.7816e-01, 6.0995e-01, 7.7153e-01, 2.7887e-01, - 7.5408e-01, 3.7058e-02, 7.3075e-01, 8.8680e-01, - 4.8675e-01, 8.1393e-01, 1.7963e-01, 6.6882e-02, - 1.0117e-01, 2.9975e-01, 6.4973e-02, 1.2699e-01, - 8.4331e-01, 5.6704e-01, 4.1427e-01, 
9.6904e-01, - 8.6781e-01, 1.4923e-01, 4.3881e-01, 9.4444e-03, - 2.7722e-01, 8.0251e-01, 2.7641e-02, 1.7977e-01, - 2.7851e-01, 4.0525e-01, 7.3115e-01, 5.0286e-01, - 1.0425e-01, 6.6263e-01, 1.1008e-01, 2.7001e-01, - 1.4887e-01, 1.1304e-01, 4.9196e-01, 8.2746e-01, - 9.3889e-01, 4.3963e-01, 7.8570e-01, 7.6910e-01, - 5.6108e-01, 5.2922e-02, 5.1490e-01, 4.0716e-01, - 9.5061e-01, 8.3793e-01, 8.8752e-01, 2.8193e-01, - 2.2696e-01, 5.0925e-01, 5.6142e-01, 5.3658e-01, - 6.7385e-01, 9.5147e-01, 1.7414e-01, 3.4050e-01, - 4.2443e-01, 3.4825e-01, 9.2234e-01, 1.4358e-01, - 6.8724e-01, 2.5411e-01, 1.1012e-01, 5.3814e-01, - 8.2895e-01, 8.8965e-01, 3.2104e-01, 4.9204e-01, - 7.0080e-02, 2.0692e-01, 8.5369e-03, 4.1856e-01, - 1.1532e-01, 5.3277e-01, 4.9503e-01, 9.6395e-01, - 2.0702e-01, 9.3586e-01, 1.4136e-01, 1.9169e-01, - 2.3680e-01, 3.8705e-01, 1.1694e-01, 4.6803e-01, - 8.6328e-04, 9.5239e-01, 7.8887e-01, 8.5043e-01, - 1.2684e-01, 8.6429e-01, 9.4349e-01, 3.3252e-01, - 3.1966e-01, 1.5868e-01, 5.3139e-01, 8.4187e-01, - 6.2553e-02, 2.3747e-01, 7.3373e-01, 8.9954e-01, - 2.0646e-01, 5.1753e-01, 2.1982e-01, 8.1967e-01, - 4.4549e-02, 6.7763e-01, 8.7376e-01, 2.3773e-01, - 7.1060e-01, 3.4842e-01, 4.0725e-01, 4.9744e-01, - 1.9944e-01, 4.5739e-01, 6.6146e-02, 3.5634e-01, - 9.9532e-01, 1.9343e-01, 8.2669e-01, 4.6879e-01, - 2.0484e-01, 2.4199e-01, 7.0875e-01, 4.7504e-01, - 5.3929e-01, 7.3912e-01, 8.8178e-01, 3.9528e-01, - 7.5814e-01, 3.1440e-01, 5.5335e-01, 6.8341e-01, - 6.8526e-01, 4.5211e-01, 8.6014e-01, 4.2533e-01, - 7.7686e-01, 9.8740e-01, 6.6787e-01, 3.8617e-01, - 7.1282e-01, 3.4285e-01, 2.4578e-01, 1.6732e-02, - 1.3306e-01, 9.0550e-01, 6.5296e-01, 8.2162e-02, - 8.9173e-01, 4.0778e-01, 5.1374e-02, 5.6948e-01, - 5.9055e-01, 6.4468e-01, 4.9269e-01, 2.8866e-01, - 7.8007e-01, 6.4385e-01, 3.0288e-01, 8.1409e-01, - 2.1734e-01, 3.5483e-01, 4.3751e-01, 3.8616e-01, - 9.5444e-01, 5.5599e-01, 8.6840e-01, 5.9001e-01, - 8.7433e-01, 5.9580e-01, 3.2009e-02, 9.6400e-02, - 8.1664e-01, 6.1383e-01, 1.8644e-01, 4.9923e-01, - 5.6197e-02, 8.0709e-01, 3.5743e-01, 2.8251e-02, - 1.3573e-01, 9.9283e-01, 1.4065e-01, 9.0814e-01, - 4.8452e-02, 8.8843e-01, 1.6080e-01, 4.2915e-01, - 7.8624e-01, 3.4093e-01, 7.2624e-01, 8.5272e-01, - 4.1485e-01, 2.0991e-01, 1.6736e-02, 6.2741e-01, - 6.2117e-01, 7.1089e-02, 3.2588e-01, 1.3331e-01, - 1.6339e-01, 7.4822e-01, 9.0327e-01, 1.8440e-01, - 5.8138e-01, 2.1518e-01, 1.4683e-01, 4.7070e-01, - 3.7685e-01, 6.3560e-01, 8.2158e-01, 1.9214e-01, - 7.1863e-01, 7.7731e-01, 4.1349e-02, 4.1811e-01, - 2.1785e-01, 8.8719e-01, 7.3854e-01, 2.2702e-01, - 1.2780e-01, 1.1562e-01, 2.9558e-01, 7.0063e-01, - 3.0304e-01, 2.2893e-01, 8.6231e-01, 4.1059e-01, - 4.6898e-01, 3.7939e-01, 9.9234e-01, 7.9008e-01, - 1.7271e-02, 7.8224e-02, 3.9120e-01, 7.2943e-01, - 8.9274e-01, 7.2804e-01, 2.9125e-01, 2.9461e-01, - 5.5926e-04, 5.7127e-02, 3.2634e-01, 5.9806e-01, - 6.6821e-01, 9.7015e-01, 4.5580e-01, 5.1612e-01, - 9.8549e-01, 5.1499e-01, 8.7530e-01, 9.7102e-01, - 4.2024e-01, 6.0139e-01, 8.8153e-01, 8.5210e-01, - 3.9449e-01, 9.3593e-01, 4.8778e-02, 5.9719e-01, - 9.6158e-01, 2.5940e-01, 6.1996e-01, 6.8075e-01, - 7.5797e-01, 1.5981e-01, 5.5610e-01, 2.8746e-01, - 8.1320e-02, 2.7118e-01, 5.6972e-02, 6.2419e-01, - 6.7384e-01, 2.7560e-01, 5.8770e-01, 6.8138e-01, - 9.0889e-01, 6.6457e-01, 6.3371e-01, 1.9423e-01, - 3.8710e-01, 2.4285e-01, 7.8363e-01, 4.6299e-01, - 2.0956e-01, 5.7082e-01, 6.5173e-01, 5.7313e-01, - 1.6594e-01, 6.0966e-01, 9.6414e-02, 5.8382e-01, - 8.1047e-01, 8.8127e-01, 9.7669e-01, 1.2448e-01, - 9.8313e-01, 2.3959e-01, 6.9699e-01, 
2.2990e-01, - 5.7752e-01, 8.9664e-01, 2.0216e-01, 9.1705e-03, - 9.6705e-02, 3.7369e-02, 7.4901e-02, 4.1642e-01, - 2.8389e-01, 9.0397e-01, 6.5649e-01, 7.7615e-01, - 1.1745e-01, 9.6129e-01, 4.3090e-01, 1.1566e-01, - 1.4618e-01, 6.0785e-01, 7.8769e-01, 9.6897e-01, - 2.7124e-01, 9.2450e-01, 7.8702e-01, 7.1601e-01, - 7.7713e-01, 6.7111e-01, 3.7125e-01, 4.5772e-01, - 6.7504e-02, 1.0098e-01, 5.9474e-01, 6.2316e-01, - 3.3852e-01, 6.0984e-01, 8.2521e-01, 4.4878e-01, - 5.3732e-01, 4.0525e-01, 8.5208e-01, 2.4995e-01, - 9.2809e-01, 4.5984e-01, 5.4973e-01, 1.8787e-01, - 2.4292e-02, 4.9504e-01, 4.0271e-01, 9.8695e-01, - 7.3109e-01, 5.6608e-01, 3.1516e-01, 5.2712e-01, - 7.3263e-01, 9.2175e-02, 4.9851e-01, 6.8842e-01, - 6.5507e-01, 7.9785e-01, 7.9736e-01, 2.0350e-01, - 3.0152e-01, 1.7449e-03, 4.6760e-01, 8.3885e-01, - 7.1918e-01, 7.5664e-01, 2.5782e-01, 9.8605e-01, - 9.3790e-01, 7.4069e-01, 6.5677e-01, 7.9951e-01, - 6.0790e-01, 6.7957e-01, 6.6074e-01, 3.9157e-01, - 9.0328e-01, 6.3894e-01, 4.0108e-01, 3.1656e-01, - 2.9250e-01, 5.6162e-01, 8.1784e-01, 2.2062e-01, - 5.6757e-01, 8.0534e-01, 7.6018e-01, 2.0955e-01, - 4.0000e-01, 7.0074e-01, 7.8129e-01, 6.2359e-01, - 2.8015e-01, 8.1904e-01, 9.5535e-01, 2.1447e-01, - 1.7544e-01, 9.7574e-01, 2.8111e-01, 7.3511e-01, - 5.0578e-03, 2.9594e-01, 9.2274e-01, 8.0725e-01, - 6.3487e-02, 8.6772e-02, 2.7162e-02, 6.4633e-01, - 6.0350e-01, 9.6413e-01, 3.5128e-01, 9.0722e-01, - 7.3824e-01, 9.5999e-02, 1.4347e-01, 3.9736e-01, - 7.8452e-01, 5.0295e-01, 9.3458e-01, 2.8296e-01, - 5.1810e-01, 5.9604e-01, 5.5536e-01, 6.8616e-02, - 4.5378e-01, 5.7180e-01, 4.4828e-01, 3.2565e-02, - 3.8607e-01, 6.8995e-01, 8.2802e-01, 9.2692e-01, - 6.0816e-01, 6.8835e-01, 4.1899e-01, 8.8009e-01, - 8.4180e-01, 7.4527e-01, 4.0284e-01, 5.7607e-01, - 7.4242e-01, 2.7207e-01, 9.0512e-01, 4.0952e-01, - 5.7238e-01, 7.2598e-01, 4.5506e-01, 5.9661e-01, - 8.0981e-01, 6.1561e-01, 9.7702e-01, 5.8719e-01, - 6.8327e-01, 8.4993e-01, 5.4135e-01, 3.8588e-01, - 7.4067e-01, 9.2621e-01, 4.6096e-01, 2.8529e-01, - 1.4482e-01, 2.1615e-01, 7.4707e-01, 3.9938e-01, - 9.2713e-01, 5.2573e-01, 1.1798e-01, 9.5194e-01, - 2.4309e-01, 5.8642e-01, 1.3744e-01, 2.7034e-02, - 1.5566e-01, 3.0149e-01, 6.8730e-01, 3.7439e-01, - 6.8046e-01, 9.6453e-01, 5.6607e-01, 3.8754e-01, - 5.0828e-01, 8.5224e-01, 5.3385e-01, 8.4458e-01, - 3.9624e-01, 7.8082e-01, 6.3012e-01, 8.6090e-01, - 8.2682e-02, 2.8030e-01, 8.3641e-01, 7.3690e-01, - 7.6480e-01, 3.1092e-01, 8.2627e-01, 8.5567e-01, - 3.9378e-01, 8.8259e-01, 2.7266e-01, 9.2776e-01, - 7.7166e-01, 7.9371e-01, 1.8818e-03, 3.4382e-01, - 2.5273e-01, 5.3813e-01, 1.6621e-01, 7.1769e-01, - 2.4843e-01, 2.2461e-01, 8.5656e-02, 8.3542e-01, - 3.1930e-01, 3.0431e-01, 1.8306e-01, 5.6499e-01, - 9.6427e-01, 4.7266e-01, 8.0358e-01, 6.5886e-02, - 1.2313e-01, 3.7418e-01, 6.2444e-01, 4.7433e-01, - 3.4292e-01, 4.6292e-01, 4.9627e-01, 7.1799e-01, - 1.0792e-01, 8.9623e-02, 9.4429e-02, 8.6175e-01, - 4.4076e-01, 9.1106e-02, 2.1722e-01, 5.0437e-01, - 1.1861e-01, 6.2768e-01, 9.5253e-01, 7.8368e-01, - 1.6111e-02, 7.6369e-01, 6.5446e-02, 4.1335e-01, - 4.8179e-01, 1.6964e-01, 8.0002e-01, 9.1953e-01, - 3.6392e-01, 7.5378e-01, 3.1533e-01, 3.7893e-01, - 3.0600e-01, 5.6584e-01, 6.2913e-01, 4.0979e-01, - 1.6906e-01, 6.0526e-01, 4.1224e-02, 7.2197e-01, - 9.9469e-01, 6.4528e-01, 7.4957e-01, 4.6243e-01, - 7.2221e-01, 4.9441e-02, 8.3462e-01, 5.1226e-01, - 1.6898e-01, 2.7865e-01, 8.1535e-01, 5.0811e-01, - 2.5135e-01, 1.1343e-01, 5.0433e-01, 9.0188e-03, - 2.7474e-01, 2.2469e-01, 2.4591e-01, 7.8338e-01, - 5.4588e-01, 2.0641e-01, 4.3812e-01, 
7.5445e-01, - 9.3529e-01, 4.2207e-01, 6.5658e-01, 7.7379e-01, - 3.0028e-01, 6.2521e-01, 7.8399e-01, 1.6164e-01, - 8.1784e-01, 3.6623e-02, 3.0384e-01, 5.7160e-01, - 7.9836e-01, 5.5367e-01, 4.0792e-01, 2.8991e-01, - 2.6509e-01, 5.2970e-01, 9.9647e-02, 6.5667e-01, - 6.3266e-01, 4.9762e-01, 4.5680e-01, 9.7947e-01, - 2.9317e-02, 3.0876e-01, 9.0484e-01, 6.2800e-01, - 1.1287e-01, 2.2607e-01, 4.3438e-01, 6.4189e-01, - 4.8834e-01, 2.2177e-01, 2.0731e-02, 5.1039e-01, - 1.4713e-01, 9.9235e-01, 7.4332e-02, 9.9883e-01, - 7.6851e-01, 1.6249e-01, 6.7292e-01, 4.0961e-01, - 1.3180e-01, 9.4992e-01, 6.0024e-01, 1.0024e-01, - 9.2405e-01, 6.0441e-01, 4.3533e-01, 4.1263e-01, - 6.7157e-01, 3.1733e-01, 5.4730e-01, 3.9169e-01, - 3.4628e-01, 4.1199e-01, 2.6291e-01, 1.0795e-01, - 2.2910e-01, 1.7739e-01, 8.4945e-01, 7.3081e-01, - 5.8030e-01, 8.8301e-02, 1.7386e-01, 4.8359e-01, - 6.9634e-01, 9.8297e-01, 6.0213e-01, 5.1143e-01, - 6.3160e-01, 6.2993e-01, 9.2445e-01, 1.7298e-01, - 5.9912e-01, 8.9877e-01, 9.8527e-01, 4.8430e-01]), + col_indices=tensor([2253, 476, 8386, 498, 9957, 4225, 8921, 5276, 6649, + 8361, 9030, 5103, 3236, 7146, 9127, 2162, 9108, 6109, + 7536, 3391, 5945, 596, 2632, 4253, 1582, 1210, 8101, + 3475, 1476, 5207, 5384, 5794, 8608, 7628, 6539, 4656, + 3584, 5833, 2648, 8342, 6408, 8271, 1628, 7349, 575, + 7362, 4397, 3774, 5414, 2631, 5850, 2642, 3145, 3161, + 377, 8231, 2181, 5528, 2062, 2662, 8705, 9554, 9972, + 7839, 4744, 1749, 9566, 8398, 2429, 4619, 8801, 4605, + 923, 3311, 3483, 3043, 7643, 9036, 8304, 1912, 6129, + 5169, 5472, 5945, 2394, 4490, 494, 3501, 5216, 6603, + 665, 7641, 281, 3907, 8487, 5619, 9635, 4755, 2164, + 2784, 5175, 1775, 6954, 9274, 7097, 8360, 5171, 9211, + 7466, 7749, 191, 4501, 4484, 7642, 624, 2893, 5539, + 843, 8041, 8, 7403, 371, 2730, 286, 1593, 634, + 6431, 3385, 7870, 1108, 7997, 9418, 6500, 144, 9317, + 8997, 4648, 3107, 6947, 7390, 1587, 6749, 1802, 1348, + 6123, 1209, 9744, 121, 7082, 1344, 5264, 3961, 337, + 1330, 8627, 8947, 5889, 2601, 7669, 6120, 1019, 9499, + 9644, 7263, 5542, 2071, 8937, 953, 6648, 3926, 8115, + 5125, 7691, 352, 2444, 9825, 446, 2457, 8890, 6114, + 444, 1431, 454, 7900, 764, 5672, 4233, 944, 6601, + 3851, 1429, 6524, 7034, 1142, 5431, 6286, 5413, 2636, + 6329, 8315, 3414, 2949, 3506, 3226, 5971, 847, 3600, + 992, 324, 7224, 5784, 6318, 83, 3862, 582, 144, + 9213, 7884, 7303, 9318, 282, 388, 7306, 5058, 5981, + 4403, 1069, 8491, 3796, 8782, 1118, 9875, 4797, 4628, + 7243, 6257, 3754, 989, 2393, 8967, 8200, 5340, 3683, + 3434, 6938, 77, 3597, 9197, 8552, 9020, 7876, 3038, + 3483, 5985, 8914, 6258, 909, 1009, 5616, 7735, 6306, + 8373, 8632, 9452, 2154, 850, 8434, 629, 2534, 4582, + 4794, 9281, 8393, 6244, 3735, 4916, 6011, 2651, 609, + 7143, 4921, 4268, 6408, 1599, 8894, 2479, 2987, 832, + 3094, 5886, 3124, 7081, 5811, 3779, 5580, 6485, 5673, + 2218, 6770, 8285, 7334, 9715, 9128, 9532, 7461, 5679, + 8558, 8501, 477, 1192, 7608, 1319, 5616, 6695, 471, + 3772, 635, 9729, 2007, 6636, 676, 3085, 7945, 9975, + 3381, 2953, 8697, 7684, 3880, 2799, 6966, 7129, 4996, + 6015, 4533, 1443, 3747, 1336, 2630, 929, 5196, 3570, + 9424, 2273, 2381, 5266, 1572, 6420, 3875, 6027, 7141, + 6677, 6980, 3009, 2751, 11, 9306, 7709, 5830, 3046, + 9401, 5663, 1417, 3815, 8979, 4391, 9801, 7149, 456, + 5864, 1995, 124, 2256, 64, 7564, 4815, 7391, 5180, + 1771, 6764, 7376, 8538, 3458, 9772, 5308, 4242, 3324, + 1587, 8262, 7034, 8132, 3536, 1819, 291, 5025, 8022, + 2479, 2185, 738, 6526, 1167, 198, 6694, 4805, 3411, + 1094, 5203, 6491, 3786, 3097, 14, 5114, 9852, 2892, + 
1099, 3181, 7333, 9943, 2852, 3882, 1189, 7485, 801, + 7674, 1836, 2040, 48, 9611, 831, 9698, 3539, 9064, + 2385, 8967, 1047, 3548, 8644, 1830, 2701, 4898, 5632, + 4136, 454, 5669, 6049, 4736, 9501, 135, 4897, 1534, + 9703, 7765, 8595, 8435, 2609, 6015, 2453, 9389, 2270, + 465, 7417, 430, 7171, 8871, 1021, 7527, 7908, 5769, + 8048, 3197, 8416, 7025, 5256, 5349, 524, 2853, 25, + 2139, 4464, 8829, 8865, 1374, 8061, 5300, 5876, 877, + 5876, 5689, 2903, 4858, 8712, 2885, 8268, 2876, 745, + 8136, 3532, 1841, 2997, 5759, 8795, 2224, 2888, 3987, + 7096, 5077, 4678, 314, 6278, 2146, 7763, 289, 9645, + 6451, 5383, 2666, 2568, 8764, 9199, 1976, 9869, 1871, + 2021, 3166, 6570, 1222, 7177, 2488, 2338, 7477, 1189, + 3030, 3721, 1710, 1232, 7961, 4126, 308, 4200, 965, + 9904, 4017, 6562, 6332, 7006, 5924, 2802, 7274, 1169, + 3009, 4422, 6534, 9656, 208, 8761, 850, 9534, 4682, + 2555, 6198, 6368, 7692, 5662, 2129, 3691, 4273, 9508, + 2696, 7307, 3188, 6647, 5727, 8700, 4207, 682, 9106, + 2149, 4986, 5637, 1941, 499, 3102, 1738, 6760, 4204, + 3978, 8966, 6178, 2258, 6397, 4255, 8893, 3201, 8526, + 2337, 7166, 5403, 8700, 6301, 4346, 7735, 2026, 5837, + 6048, 4330, 7066, 9100, 599, 3049, 8143, 7838, 9560, + 19, 1997, 7509, 3059, 631, 1554, 7487, 3158, 9740, + 4447, 7007, 6865, 7805, 8858, 9961, 4470, 1340, 3019, + 3976, 7670, 3406, 7333, 9623, 8635, 8755, 230, 7678, + 300, 7869, 1930, 5050, 6663, 7893, 2260, 9909, 3177, + 7398, 7540, 8321, 1906, 4295, 342, 9074, 2945, 347, + 8837, 4083, 5991, 9167, 6116, 5351, 3445, 3983, 8585, + 2090, 6146, 270, 9204, 4432, 3628, 6533, 7536, 2391, + 1685, 2450, 9637, 3683, 2462, 3807, 1036, 3839, 228, + 3925, 3816, 8324, 6421, 5687, 5110, 7733, 1652, 4160, + 1137, 5865, 3286, 9592, 2840, 3618, 4485, 311, 4669, + 928, 4264, 6305, 9097, 2633, 2665, 4297, 1264, 4645, + 3225, 4230, 5094, 1069, 5003, 6336, 5540, 4439, 9466, + 7232, 5756, 3182, 2102, 2707, 1311, 2637, 7030, 3968, + 6272, 1505, 4585, 7602, 4311, 2082, 9202, 5354, 6351, + 1441, 8362, 3808, 4841, 9724, 9207, 3877, 8352, 7408, + 4030, 2185, 9052, 7226, 3336, 3945, 8666, 7938, 8782, + 4100, 8086, 5984, 3626, 2672, 7372, 3546, 2561, 4074, + 8717, 8180, 9385, 7597, 5929, 7174, 2432, 8780, 9283, + 6149, 5146, 6441, 6822, 8442, 9156, 429, 7461, 4279, + 4570, 2502, 7584, 4060, 1992, 8593, 5213, 1470, 212, + 8012, 2299, 9100, 164, 6950, 318, 3931, 8854, 2544, + 8123, 4669, 3733, 1762, 1623, 624, 8523, 8469, 8314, + 6018, 2782, 641, 6358, 3665, 84, 4407, 4887, 7885, + 6216, 5842, 7476, 6155, 3633, 122, 5167, 1130, 9085, + 2101, 4068, 9401, 1920, 9958, 6623, 2692, 1400, 9977, + 747, 5121, 4933, 3403, 6168, 2497, 4983, 1046, 368, + 4188, 2792, 7678, 3307, 6594, 5130, 3360, 4616, 6665, + 756, 7347, 9242, 1770, 5077, 4147, 7161, 4247, 7564, + 9777, 7509, 1971, 6841, 9178, 3219, 3923, 5557, 1156, + 6659, 5854, 755, 7951, 3645, 5002, 5996, 5298, 2962, + 664, 910, 9706, 559, 9086, 7185, 7068, 8873, 3043, + 6847, 899, 9812, 5204, 6576, 9492, 2795, 1137, 2641, + 3328, 9343, 564, 9305, 2923, 1111, 1779, 3369, 6605, + 4019, 1610, 880, 1451, 9582, 5536, 5942, 8693, 761, + 2047, 8128, 3665, 344, 200, 829, 921, 6070, 5139, + 7786, 1227, 845, 2875, 8428, 3177, 7508, 8640, 3794, + 1021, 1201, 793, 4861, 1815, 805, 934, 278, 9733, + 3485, 6488, 9019, 3305, 9442, 5657, 1096, 9025, 788, + 9251, 8928, 2037, 6123, 8627, 9039, 338, 9114, 5248, + 6939, 4216, 482, 938, 2695, 7299, 281, 7594, 308, + 2340, 3138, 4712, 2333, 1413, 5695, 6460, 8022, 5681, + 9297, 3452, 7181, 2694, 9869, 6366, 7679, 706, 6973, + 7311, 612, 6829, 395, 1800, 3281, 659, 
7900, 3453, + 8264]), + values=tensor([0.2353, 0.1782, 0.2258, 0.1855, 0.4423, 0.9398, 0.9665, + 0.8674, 0.3458, 0.4757, 0.8998, 0.6019, 0.2886, 0.4221, + 0.6484, 0.9801, 0.0245, 0.8132, 0.5707, 0.2417, 0.1923, + 0.3961, 0.7153, 0.5454, 0.4598, 0.6610, 0.4260, 0.1997, + 0.0793, 0.4958, 0.2937, 0.5847, 0.0359, 0.4976, 0.2729, + 0.8319, 0.8665, 0.9690, 0.6840, 0.6532, 0.0099, 0.4971, + 0.1625, 0.5070, 0.5926, 0.1981, 0.5308, 0.2462, 0.8898, + 0.8763, 0.9767, 0.5809, 0.2706, 0.9621, 0.7549, 0.3768, + 0.9278, 0.5204, 1.0000, 0.2137, 0.4229, 0.0576, 0.7793, + 0.1382, 0.0969, 0.2743, 0.5872, 0.0691, 0.9723, 0.3887, + 0.0943, 0.1416, 0.4521, 0.8468, 0.5301, 0.2564, 0.1849, + 0.6969, 0.0750, 0.3222, 0.0861, 0.6703, 0.5693, 0.6477, + 0.1966, 0.4584, 0.8691, 0.6805, 0.2765, 0.7859, 0.7443, + 0.9810, 0.3422, 0.7561, 0.4489, 0.0063, 0.4574, 0.9083, + 0.1002, 0.7936, 0.5968, 0.0707, 0.7668, 0.6483, 0.7189, + 0.4981, 0.1607, 0.6888, 0.0829, 0.7417, 0.2211, 0.2819, + 0.9757, 0.3594, 0.7768, 0.9804, 0.6903, 0.7514, 0.7754, + 0.1849, 0.3762, 0.7186, 0.0990, 0.6769, 0.3048, 0.1519, + 0.9897, 0.3153, 0.4324, 0.9344, 0.0248, 0.5611, 0.8766, + 0.7837, 0.4802, 0.9675, 0.5851, 0.5862, 0.7681, 0.0508, + 0.6572, 0.7537, 0.5656, 0.1112, 0.2042, 0.7151, 0.5006, + 0.5707, 0.5865, 0.0130, 0.0664, 0.1659, 0.3827, 0.7062, + 0.3658, 0.7399, 0.8337, 0.0364, 0.6575, 0.4213, 0.2326, + 0.9789, 0.6253, 0.6894, 0.0904, 0.7227, 0.6683, 0.5135, + 0.2681, 0.8516, 0.3120, 0.3633, 0.7677, 0.8894, 0.6031, + 0.0593, 0.5694, 0.3693, 0.1344, 0.6320, 0.4248, 0.2232, + 0.6425, 0.4196, 0.6058, 0.4603, 0.3503, 0.6498, 0.4788, + 0.4362, 0.9373, 0.3056, 0.4705, 0.4236, 0.5704, 0.4843, + 0.9620, 0.1021, 0.7610, 0.0814, 0.6722, 0.4110, 0.7771, + 0.4689, 0.6626, 0.4133, 0.8867, 0.2756, 0.4896, 0.2682, + 0.0517, 0.9650, 0.2920, 0.3181, 0.0581, 0.4257, 0.5482, + 0.1617, 0.1381, 0.1610, 0.5911, 0.2667, 0.9487, 0.9879, + 0.0731, 0.9821, 0.6859, 0.7774, 0.3801, 0.7192, 0.6019, + 0.8431, 0.3503, 0.7171, 0.9187, 0.3757, 0.4129, 0.6595, + 0.8404, 0.7307, 0.0031, 0.3753, 0.7273, 0.5407, 0.3289, + 0.0344, 0.5252, 0.4677, 0.4304, 0.6884, 0.3139, 0.1012, + 0.8221, 0.2497, 0.6689, 0.0923, 0.6619, 0.4437, 0.6033, + 0.6195, 0.2430, 0.8474, 0.1926, 0.1247, 0.2040, 0.4323, + 0.3765, 0.3732, 0.1982, 0.9063, 0.4932, 0.8726, 0.1883, + 0.5386, 0.5904, 0.3114, 0.6236, 0.7317, 0.9186, 0.7537, + 0.3131, 0.6360, 0.0203, 0.1384, 0.9474, 0.6232, 0.4153, + 0.3962, 0.5622, 0.0194, 0.2466, 0.5790, 0.2534, 0.9447, + 0.0912, 0.5169, 0.8722, 0.3340, 0.8679, 0.7267, 0.7460, + 0.9842, 0.9486, 0.7102, 0.2158, 0.1190, 0.4696, 0.4126, + 0.7215, 0.0963, 0.0323, 0.1045, 0.9260, 0.9084, 0.1646, + 0.9534, 0.2639, 0.5164, 0.0732, 0.7655, 0.7814, 0.7669, + 0.7098, 0.0085, 0.3405, 0.5626, 0.5535, 0.8817, 0.1845, + 0.6215, 0.5931, 0.8487, 0.9499, 0.3328, 0.6443, 0.7090, + 0.5190, 0.0648, 0.1536, 0.0726, 0.4312, 0.3042, 0.2809, + 0.5510, 0.4125, 0.2830, 0.2534, 0.7475, 0.2773, 0.9025, + 0.1924, 0.9863, 0.2745, 0.5967, 0.2546, 0.0514, 0.5147, + 0.5375, 0.0157, 0.3671, 0.5558, 0.6812, 0.5989, 0.4999, + 0.3845, 0.5625, 0.8284, 0.1874, 0.7319, 0.2837, 0.3265, + 0.1183, 0.5433, 0.1711, 0.6828, 0.1592, 0.6204, 0.3344, + 0.7467, 0.9883, 0.3898, 0.3093, 0.3595, 0.7632, 0.4240, + 0.4645, 0.2155, 0.9780, 0.3326, 0.3188, 0.3761, 0.9520, + 0.9066, 0.1536, 0.7306, 0.0318, 0.2393, 0.4902, 0.8550, + 0.1201, 0.9456, 0.2522, 0.7416, 0.6499, 0.6393, 0.6763, + 0.4288, 0.7816, 0.5055, 0.1002, 0.8372, 0.4266, 0.9321, + 0.5332, 0.9639, 0.3621, 0.7693, 0.2079, 0.2135, 0.2986, + 0.8318, 0.1809, 0.6887, 0.7968, 
0.4485, 0.9968, 0.3844, + 0.9171, 0.8948, 0.0563, 0.2633, 0.4923, 0.2338, 0.8688, + 0.5756, 0.7592, 0.2458, 0.9535, 0.3897, 0.0756, 0.5022, + 0.9240, 0.5638, 0.2016, 0.6776, 0.2604, 0.4394, 0.2034, + 0.8727, 0.0420, 0.7949, 0.8553, 0.2219, 0.2845, 0.7576, + 0.0822, 0.9063, 0.8381, 0.4579, 0.9659, 0.9046, 0.2436, + 0.1292, 0.6101, 0.9870, 0.8638, 0.9652, 0.7550, 0.0480, + 0.6712, 0.2958, 0.3868, 0.8628, 0.0355, 0.3204, 0.2060, + 0.3250, 0.1398, 0.3678, 0.5695, 0.7809, 0.4826, 0.0989, + 0.5059, 0.4212, 0.2406, 0.9781, 0.3638, 0.1720, 0.8480, + 0.4271, 0.8091, 0.9918, 0.3000, 0.6606, 0.8454, 0.5300, + 0.5252, 0.0635, 0.9108, 0.8222, 0.1548, 0.0985, 0.8547, + 0.6072, 0.7014, 0.3452, 0.2884, 0.3585, 0.8758, 0.1001, + 0.4468, 0.6047, 0.1332, 0.1165, 0.1894, 0.8703, 0.9388, + 0.5289, 0.0961, 0.3963, 0.4088, 0.0353, 0.4726, 0.0497, + 0.1657, 0.1332, 0.8424, 0.3770, 0.3753, 0.2990, 0.7492, + 0.3599, 0.1006, 0.2027, 0.2482, 0.2160, 0.7178, 0.4018, + 0.5121, 0.1620, 0.1861, 0.9097, 0.1876, 0.6771, 0.7242, + 0.1984, 0.8642, 0.7265, 0.8026, 0.0894, 0.7046, 0.3341, + 0.9662, 0.8060, 0.2941, 0.3211, 0.8905, 0.6195, 0.5768, + 0.2107, 0.9394, 0.6531, 0.0393, 0.7642, 0.5794, 0.9121, + 0.7424, 0.9481, 0.2357, 0.5093, 0.4284, 0.6020, 0.0803, + 0.7598, 0.8604, 0.7256, 0.6093, 0.5086, 0.3756, 0.6412, + 0.8776, 0.9545, 0.5824, 0.6562, 0.1354, 0.9933, 0.2672, + 0.0285, 0.8526, 0.6755, 0.7049, 0.2854, 0.5057, 0.9990, + 0.9196, 0.6274, 0.2554, 0.2122, 0.3370, 0.3386, 0.3169, + 0.7733, 0.2486, 0.2674, 0.1403, 0.6963, 0.7088, 0.3851, + 0.2191, 0.3001, 0.6826, 0.5536, 0.2177, 0.6052, 0.8636, + 0.9715, 0.2358, 0.4881, 0.0889, 0.8262, 0.1997, 0.8162, + 0.0582, 0.4706, 0.6276, 0.9860, 0.7618, 0.7273, 0.9791, + 0.8954, 0.1469, 0.0188, 0.2048, 0.3781, 0.0089, 0.6219, + 0.3027, 0.8627, 0.8771, 0.8271, 0.9922, 0.6041, 0.4076, + 0.3944, 0.6122, 0.9607, 0.8934, 0.1106, 0.5422, 0.8581, + 0.0265, 0.3612, 0.6596, 0.0347, 0.8474, 0.0167, 0.6080, + 0.1079, 0.0585, 0.8435, 0.3170, 0.7671, 0.7858, 0.4844, + 0.7878, 0.3447, 0.0805, 0.1858, 0.2807, 0.2509, 0.8630, + 0.6871, 0.3052, 0.7960, 0.0089, 0.0988, 0.1041, 0.9455, + 0.5949, 0.2156, 0.7655, 0.1696, 0.7121, 0.9759, 0.7770, + 0.1164, 0.5865, 0.6119, 0.4107, 0.4022, 0.3372, 0.8928, + 0.1138, 0.9831, 0.7790, 0.4196, 0.4976, 0.6814, 0.3419, + 0.7406, 0.3991, 0.0289, 0.5356, 0.4211, 0.2193, 0.0087, + 0.7933, 0.3929, 0.5198, 0.7999, 0.8401, 0.3842, 0.1942, + 0.7014, 0.8914, 0.6002, 0.7161, 0.4335, 0.9511, 0.9698, + 0.0936, 0.2259, 0.4025, 0.3622, 0.8372, 0.2849, 0.3400, + 0.6678, 0.0346, 0.4859, 0.5633, 0.8356, 0.4528, 0.6768, + 0.5972, 0.3821, 0.3636, 0.8229, 0.2322, 0.7735, 0.1857, + 0.5068, 0.3651, 0.7864, 0.7204, 0.2674, 0.3027, 0.3049, + 0.5230, 0.1538, 0.4601, 0.6939, 0.1634, 0.2994, 0.0774, + 0.2376, 0.2841, 0.3050, 0.0617, 0.7215, 0.0305, 0.8728, + 0.2061, 0.2378, 0.0375, 0.7683, 0.9988, 0.6914, 0.1107, + 0.0736, 0.9019, 0.7942, 0.2277, 0.9235, 0.3860, 0.9262, + 0.7040, 0.3201, 0.0612, 0.4204, 0.1116, 0.4348, 0.3602, + 0.5310, 0.7646, 0.4236, 0.0975, 0.2950, 0.0345, 0.6622, + 0.4405, 0.4799, 0.0829, 0.1177, 0.8585, 0.5586, 0.5044, + 0.5414, 0.8945, 0.8002, 0.9645, 0.1886, 0.9038, 0.5484, + 0.0204, 0.8519, 0.5493, 0.4895, 0.7530, 0.9959, 0.2604, + 0.6691, 0.3460, 0.4685, 0.0621, 0.3649, 0.2189, 0.2904, + 0.8808, 0.2042, 0.2147, 0.3779, 0.4607, 0.2132, 0.9129, + 0.9209, 0.1822, 0.2024, 0.8409, 0.7644, 0.5983, 0.7859, + 0.2499, 0.4718, 0.4239, 0.7014, 0.1942, 0.2520, 0.4721, + 0.6679, 0.1025, 0.1320, 0.3436, 0.3961, 0.6327, 0.2792, + 0.5959, 0.5798, 0.2139, 0.0897, 0.6923, 0.6952, 
0.9848, + 0.8212, 0.0136, 0.4390, 0.5528, 0.4977, 0.2278, 0.8417, + 0.4814, 0.4675, 0.3121, 0.8191, 0.1734, 0.1167, 0.3309, + 0.1047, 0.9547, 0.3483, 0.7649, 0.9901, 0.8778, 0.3147, + 0.6047, 0.5848, 0.7965, 0.7107, 0.9034, 0.8508, 0.2629, + 0.5257, 0.3152, 0.5617, 0.9414, 0.6428, 0.5980, 0.7990, + 0.4296, 0.1560, 0.5798, 0.0593, 0.1015, 0.7821, 0.4172, + 0.6000, 0.8223, 0.9921, 0.9472, 0.7984, 0.4961, 0.5188, + 0.8866, 0.4976, 0.6972, 0.4976, 0.6772, 0.6002, 0.2993, + 0.7316, 0.1629, 0.5415, 0.8855, 0.6732, 0.5311, 0.0305, + 0.3944, 0.9823, 0.1791, 0.9728, 0.6487, 0.2106, 0.8214, + 0.7792, 0.5466, 0.7555, 0.1071, 0.9621, 0.6549, 0.3949, + 0.9203, 0.0732, 0.7772, 0.5887, 0.2841, 0.0951, 0.4007, + 0.4779, 0.0368, 0.9097, 0.4207, 0.1401, 0.8859, 0.1233, + 0.4669, 0.0737, 0.9870, 0.7542, 0.8903, 0.5909, 0.6027, + 0.6678, 0.8937, 0.5338, 0.9161, 0.2478, 0.3565, 0.6401, + 0.1779, 0.3402, 0.7089, 0.6358, 0.7531, 0.5739, 0.2462, + 0.8489, 0.2223, 0.5307, 0.8218, 0.9545, 0.4531, 0.9697, + 0.8082, 0.7093, 0.6366, 0.5366, 0.4305, 0.2988, 0.2509, + 0.2966, 0.3305, 0.0533, 0.9515, 0.3749, 0.2751, 0.9848, + 0.8736, 0.1427, 0.2371, 0.5146, 0.5425, 0.6733, 0.7992, + 0.8399, 0.9753, 0.8351, 0.3146, 0.2432, 0.5413]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8709, 0.3477, 0.9071, ..., 0.3290, 0.2447, 0.6100]) +tensor([0.2336, 0.7811, 0.2916, ..., 0.9209, 0.8685, 0.4951]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -375,378 +268,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.08243966102600098 seconds +Time: 0.0838630199432373 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 127365 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.197555303573608} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 125204 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.234081745147705} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([8951, 5667, 956, 7950, 5201, 1331, 1765, 3006, 5900, - 2081, 5366, 8255, 7412, 6448, 5104, 6260, 9166, 2113, - 8971, 6174, 6836, 879, 9072, 156, 6527, 5601, 2012, - 6002, 4221, 7765, 3990, 7258, 2865, 1967, 7820, 9862, - 418, 17, 3074, 2165, 8428, 6171, 6497, 2053, 5484, - 4943, 9733, 4335, 9186, 435, 7561, 757, 7593, 4461, - 1964, 3289, 5697, 8181, 6697, 6346, 2540, 5038, 6182, - 7579, 9304, 3023, 5138, 7682, 8029, 1723, 4898, 3727, - 6168, 1394, 4633, 3134, 3220, 8290, 4361, 8659, 8749, - 6471, 4502, 765, 2454, 7851, 4423, 6861, 3263, 4149, - 6309, 6921, 8089, 1483, 3889, 3348, 1563, 5080, 5924, - 9985, 5924, 9061, 9701, 1918, 9154, 1454, 7379, 1012, - 5960, 5244, 7249, 1042, 5782, 1289, 7395, 9762, 5609, - 6097, 7610, 292, 747, 8778, 898, 9783, 3676, 4917, - 7746, 8001, 5522, 3015, 7253, 2567, 9147, 9836, 6451, - 4829, 313, 9348, 5810, 7122, 4162, 1558, 1306, 697, - 5233, 2078, 2835, 6223, 298, 3238, 2600, 2591, 875, - 5647, 2031, 8637, 4497, 3757, 2693, 789, 3782, 2376, - 2989, 2241, 357, 3782, 2633, 7373, 9854, 231, 2988, - 2070, 4669, 5534, 5178, 5587, 9132, 1222, 9184, 1831, - 8653, 4091, 6946, 3489, 4529, 7234, 1431, 1763, 7845, - 3709, 8160, 5434, 5198, 8333, 9047, 8040, 371, 4799, - 8555, 1520, 1237, 3704, 7267, 3499, 6169, 3396, 2756, - 8524, 4433, 7834, 7468, 981, 4682, 7935, 1187, 3506, - 9399, 5392, 3729, 3884, 3038, 1536, 348, 6647, 7856, - 1530, 2535, 468, 9754, 6571, 4248, 1308, 3268, 4373, - 6400, 6830, 8882, 3801, 8028, 4294, 1253, 6984, 8814, - 4362, 5481, 6779, 5370, 9990, 2125, 5427, 2907, 484, - 3375, 1668, 6722, 8648, 1546, 6671, 4934, 7726, 4065, - 7135, 1387, 4399, 4022, 2558, 6330, 2156, 2243, 7118, - 8898, 4356, 8307, 6366, 7530, 377, 8630, 6535, 6322, - 3893, 5173, 8179, 3990, 7010, 5730, 9636, 4081, 7352, - 4638, 803, 2043, 9662, 5196, 9680, 4473, 2982, 9221, - 9863, 438, 4406, 3232, 9102, 6251, 5767, 2833, 2230, - 9893, 1670, 1462, 3102, 2383, 9343, 5557, 5254, 6204, - 3268, 1452, 3530, 3372, 4859, 7526, 3677, 9290, 6998, - 7256, 6351, 6099, 752, 2711, 5409, 1344, 6438, 5218, - 1309, 2561, 6301, 4999, 182, 6656, 4193, 2756, 950, - 9918, 8768, 9417, 3732, 6098, 2529, 4022, 7160, 5676, - 6516, 8008, 3634, 6607, 1251, 9, 3992, 7758, 1914, - 8408, 8428, 8579, 9714, 8052, 8671, 2909, 7465, 6553, - 6602, 1740, 4016, 8658, 1603, 1303, 402, 126, 4020, - 8894, 8464, 7028, 7516, 4733, 334, 7983, 5384, 7380, - 3065, 5355, 3755, 580, 2843, 1681, 4450, 3415, 5685, - 2824, 8294, 3911, 1122, 966, 9515, 8314, 3754, 7409, - 686, 6128, 1982, 1386, 3227, 451, 7972, 6370, 4333, - 774, 9669, 6060, 8811, 1434, 2331, 2818, 7564, 8908, - 1106, 4259, 7245, 681, 4175, 7703, 4218, 9815, 4060, - 5703, 2543, 6382, 2254, 4497, 8420, 3474, 959, 2786, - 9112, 2275, 8989, 9316, 5187, 277, 5809, 4960, 558, - 5959, 2140, 3869, 9548, 4992, 6509, 7212, 1384, 1766, - 7111, 4082, 5753, 8261, 8396, 4065, 452, 479, 7733, - 9669, 6830, 8392, 1995, 6861, 5319, 6116, 8073, 8728, - 7161, 8145, 3184, 5559, 412, 783, 1547, 8107, 9519, - 3211, 6058, 1202, 440, 174, 8101, 1808, 9805, 6280, - 9452, 3118, 8983, 4424, 7637, 5101, 5179, 5972, 8442, - 7018, 9897, 4824, 1871, 1892, 6705, 4898, 98, 5681, - 7765, 8554, 8102, 7121, 7380, 5392, 7554, 8933, 4525, - 7503, 7915, 2395, 4810, 9815, 2656, 1084, 467, 3122, - 9566, 897, 837, 4757, 8988, 5537, 436, 461, 6366, - 5642, 8874, 2020, 2283, 4825, 2436, 541, 4641, 47, - 4722, 8068, 4262, 880, 9007, 8642, 1636, 9627, 4235, - 7239, 
-                       ...[~900 more random column indices of the removed tensor dump elided]..., 8296]),
-       values=tensor([...[1000 random values in (0, 1) elided]...]),
+tensor(crow_indices=tensor([   0,    0,    0,  ...,  999,  999, 1000]),
+       col_indices=tensor([...[1000 random column indices elided]...]),
+       values=tensor([...[1000 random values in (0, 1) elided]...]),
       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.1139, 0.8384, 0.4762,  ..., 0.6681, 0.9732, 0.3908])
+tensor([0.9394, 0.5361, 0.0926,  ..., 0.1333, 0.7033, 0.7122])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -754,378 +540,378 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 9.197555303573608 seconds
+Time: 9.234081745147705 seconds
 
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 145400 -ss 10000 -sd 1e-05 -c 1']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.376285076141357}
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 142368 -ss 10000 -sd 1e-05 -c 1']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.641618490219116}
 
/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-      col_indices=tensor([...[1000 random column indices of the removed tensor dump elided]...]),
-      values=tensor([...[1000 random values in (0, 1) elided]...]),
+      col_indices=tensor([...[1000 random column indices elided]...]),
+      values=tensor([...[1000 random values in (0, 1) elided]...]),
       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.6283, 0.6554, 0.1926,  ..., 0.5716, 0.9993, 0.6492])
+tensor([0.5424, 0.9332, 0.7035,  ..., 0.9872, 0.5484, 0.9353])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -1133,375 +919,375 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 10.376285076141357 seconds
+Time: 10.641618490219116 seconds
 
/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([2248, 4486, 2578, 3740, 621, 6003, 5803, 7740, 8017, - 8357, 2886, 8788, 9848, 2845, 3345, 9526, 6879, 849, - 9475, 1600, 5380, 5334, 6629, 9937, 1676, 3949, 9759, - 1297, 1271, 554, 8126, 7607, 6824, 8955, 3784, 6636, - 6716, 7362, 236, 4770, 377, 1035, 7094, 4817, 9140, - 2937, 60, 7489, 6793, 9918, 3932, 6069, 5062, 5030, - 1223, 3975, 150, 7966, 1822, 242, 7431, 4532, 9014, - 8126, 915, 7358, 2001, 3806, 564, 5560, 6173, 620, - 8900, 1133, 6344, 486, 265, 5173, 6593, 9511, 1972, - 6657, 9996, 3207, 27, 7301, 9620, 504, 7560, 1601, - 7424, 6685, 9645, 8602, 1386, 2669, 7610, 3723, 4006, - 2340, 4530, 2647, 5701, 4426, 8272, 3355, 7800, 1132, - 6460, 5948, 6002, 5599, 7637, 1754, 3726, 7844, 4922, - 6626, 3071, 5112, 9488, 6276, 63, 6759, 8385, 8552, - 1584, 8134, 9707, 7442, 9004, 7463, 5317, 8848, 2939, - 2603, 8194, 2739, 5486, 256, 2365, 9508, 2237, 2470, - 6761, 3844, 820, 3960, 1427, 3014, 6205, 9526, 187, - 5743, 8313, 7945, 4733, 316, 4870, 8198, 2605, 745, - 6138, 3766, 3604, 9846, 2494, 2928, 9060, 9638, 2293, - 1334, 3566, 5466, 8151, 4330, 3680, 6199, 2041, 6318, - 1803, 5034, 4823, 7162, 9670, 280, 8489, 1737, 2606, - 6216, 1938, 8976, 5856, 1117, 6857, 4276, 4103, 6680, - 9260, 6437, 1742, 9346, 2197, 2419, 6198, 9840, 242, - 5981, 117, 8727, 6383, 8881, 1592, 6491, 291, 8533, - 4799, 8873, 6031, 492, 9755, 8898, 9538, 3126, 3673, - 7165, 7259, 875, 9998, 4153, 3488, 4806, 1041, 6719, - 6621, 5171, 1220, 6019, 5757, 7236, 8256, 5996, 5674, - 9515, 884, 4587, 7341, 6133, 103, 4029, 3802, 6524, - 2472, 5443, 6806, 2814, 1694, 9023, 2383, 8670, 150, - 8994, 6959, 4597, 769, 5991, 1821, 3685, 7931, 9192, - 3640, 71, 9825, 1072, 4022, 3163, 389, 3350, 3717, - 5813, 4622, 1106, 4472, 4551, 1684, 6929, 301, 7590, - 7048, 8693, 7825, 1347, 1110, 2977, 261, 4463, 7673, - 6591, 9953, 9930, 6563, 4448, 8022, 5661, 6717, 9701, - 8152, 330, 4317, 726, 7842, 2771, 5867, 8845, 6599, - 345, 711, 3072, 5521, 7893, 9001, 7298, 4506, 7966, - 9217, 3422, 9151, 4171, 6251, 7471, 8643, 1037, 6053, - 7103, 6011, 9381, 7007, 3198, 7413, 5504, 868, 807, - 9558, 4156, 1608, 151, 6572, 5212, 4737, 4239, 8899, - 1675, 4293, 6235, 3154, 638, 2173, 3370, 5882, 3746, - 1224, 6845, 7214, 7881, 8306, 9751, 8223, 7814, 3399, - 8182, 9634, 2816, 6681, 9974, 5336, 2828, 7713, 4327, - 7163, 9008, 4567, 1895, 12, 9582, 9593, 8980, 272, - 8032, 8958, 2398, 6519, 6198, 2543, 922, 7509, 1247, - 8974, 5180, 6179, 5748, 196, 904, 6210, 2122, 9168, - 8422, 6490, 4019, 3865, 4779, 2054, 5563, 6204, 2106, - 5654, 7998, 597, 7416, 2479, 6142, 770, 4293, 5948, - 8141, 8297, 9216, 3867, 2651, 4047, 5002, 2595, 3555, - 2174, 7868, 7327, 342, 5550, 3470, 1746, 5793, 7441, - 7004, 2390, 1078, 4771, 5727, 30, 758, 5897, 2502, - 9822, 9987, 7356, 859, 2903, 2564, 2920, 9271, 3693, - 5356, 3524, 8301, 1656, 7891, 4145, 9135, 6976, 8579, - 833, 8722, 6055, 9041, 9614, 4985, 9991, 8673, 9414, - 9019, 3390, 725, 9820, 2425, 8099, 6415, 9969, 6125, - 1611, 3306, 4585, 645, 1773, 8803, 2043, 2286, 1213, - 9485, 2215, 7640, 7708, 2399, 3154, 8276, 1166, 6491, - 2501, 725, 5875, 8477, 2966, 1446, 3395, 9153, 3079, - 1192, 9045, 7932, 9179, 8868, 3781, 4831, 8933, 9803, - 3724, 6396, 4228, 7956, 4967, 8351, 7905, 7539, 8192, - 9993, 4457, 5387, 8296, 5158, 3333, 708, 476, 3968, - 5277, 7708, 1410, 3177, 5458, 4440, 4550, 296, 4323, - 3462, 3083, 9337, 8193, 6143, 3740, 897, 7990, 2278, - 6384, 7710, 
3296, 331, 462, 6916, 6923, 8978, 6177, - 9290, 1417, 5590, 5685, 6828, 8296, 8381, 9216, 476, - 6358, 7285, 1855, 3164, 4932, 8288, 6409, 5094, 1306, - 6007, 8471, 5458, 6254, 3652, 4046, 7815, 8715, 3786, - 1795, 6315, 2882, 8286, 6349, 9713, 8963, 66, 2329, - 6124, 9842, 1259, 9947, 8722, 2982, 4275, 8310, 6873, - 4768, 8622, 3931, 7924, 7390, 3002, 7246, 2227, 171, - 6920, 3991, 9945, 6413, 3754, 8242, 5360, 4923, 5926, - 695, 9843, 1823, 1545, 2212, 6864, 1755, 2272, 215, - 7802, 6355, 7266, 1309, 1497, 8441, 9715, 6376, 9230, - 5931, 4201, 3022, 3307, 5726, 8235, 1964, 6883, 8294, - 3695, 9567, 4015, 9092, 4811, 5087, 3500, 9002, 6559, - 2649, 290, 2110, 9321, 4170, 164, 9410, 3089, 9451, - 3947, 7753, 1392, 1306, 243, 1301, 704, 2798, 2938, - 4752, 3676, 9586, 703, 7082, 9412, 1050, 6555, 4617, - 3094, 981, 412, 6322, 6998, 7588, 2932, 523, 2537, - 3071, 5598, 4943, 5205, 9249, 4919, 4693, 3046, 1397, - 1395, 2346, 6770, 8788, 7234, 1431, 6167, 5943, 4227, - 9979, 652, 3735, 1165, 308, 9727, 8879, 4796, 8375, - 6258, 506, 1990, 5666, 4495, 5415, 7727, 9298, 5899, - 2244, 4112, 5158, 4073, 937, 2378, 4452, 1403, 8204, - 7797, 2213, 6334, 1980, 6425, 7740, 8281, 867, 9774, - 4781, 5252, 9729, 3926, 6156, 4407, 6838, 226, 4075, - 793, 7067, 9361, 6893, 7979, 5619, 2360, 9592, 9930, - 5095, 2717, 226, 8917, 7076, 8913, 7750, 2503, 3399, - 6842, 5068, 8251, 3296, 9411, 4198, 126, 5660, 1235, - 1833, 9365, 8923, 2076, 5603, 5346, 328, 2378, 5545, - 1993, 2974, 8099, 7909, 1311, 8275, 3756, 2486, 705, - 5361, 5735, 5047, 367, 7801, 6827, 6671, 2053, 1314, - 1929, 9497, 9506, 9494, 7770, 8412, 7304, 169, 7103, - 4686, 2341, 9645, 80, 7906, 1210, 2186, 9641, 334, - 8190, 3885, 2984, 8518, 8245, 8533, 5286, 5258, 7956, - 7408, 1862, 2077, 5580, 5935, 7292, 6939, 1548, 6790, - 7824, 8270, 1817, 6609, 6093, 4744, 3164, 2927, 2913, - 1259, 1989, 7667, 6058, 7524, 5404, 9197, 4190, 1346, - 5022, 8117, 6599, 93, 3011, 7992, 6769, 4569, 7376, - 7531, 1048, 4000, 490, 5996, 5906, 5365, 4036, 6734, - 4388, 3829, 2746, 9309, 6508, 9594, 624, 2217, 4304, - 1773, 7985, 1454, 3880, 3015, 6129, 9561, 3818, 6896, - 978, 3666, 4908, 1992, 5403, 6907, 2501, 7887, 2732, - 4383, 2172, 3313, 4934, 9340, 7685, 1693, 3520, 4423, - 203, 9116, 8657, 9778, 4440, 8123, 5395, 160, 8069, - 2984, 7361, 9385, 529, 1338, 6935, 2925, 7149, 5643, - 5833, 2323, 7031, 3050, 1864, 8938, 5395, 732, 194, - 932, 4702, 2935, 435, 3305, 4504, 4306, 5520, 2583, - 2711, 2433, 2449, 1007, 418, 4026, 895, 5276, 1175, - 4970, 716, 2193, 4184, 5233, 6745, 5418, 4772, 4938, - 326, 675, 1579, 5312, 977, 1458, 7168, 4339, 357, - 2034, 3928, 9009, 5821, 97, 793, 3006, 5787, 1958, - 879]), - values=tensor([1.7784e-02, 6.7536e-01, 3.2815e-01, 5.0212e-01, - 8.2459e-01, 4.0831e-01, 3.5370e-01, 4.7010e-01, - 6.4249e-01, 8.4444e-01, 1.2869e-01, 5.1729e-01, - 7.8578e-01, 8.1935e-01, 2.2395e-01, 9.1242e-01, - 2.3102e-01, 2.5964e-01, 7.5333e-01, 4.0994e-01, - 7.8409e-01, 8.3098e-02, 4.4159e-02, 6.9849e-01, - 5.4876e-01, 7.7185e-02, 1.4256e-01, 9.4588e-01, - 3.8493e-01, 7.9894e-01, 2.6476e-01, 6.5332e-01, - 2.7952e-01, 9.6279e-01, 6.7687e-01, 1.5134e-01, - 2.0992e-01, 1.8817e-01, 1.5476e-01, 3.7619e-01, - 6.0915e-01, 2.5395e-01, 7.0490e-01, 5.1472e-01, - 8.1531e-01, 8.1250e-01, 2.4074e-01, 6.2823e-01, - 9.6154e-02, 8.5599e-01, 4.0257e-01, 9.7211e-01, - 3.1491e-01, 5.8549e-01, 5.1698e-01, 1.4512e-01, - 5.6220e-01, 8.5455e-01, 1.5383e-01, 3.4851e-01, - 1.6742e-01, 2.8756e-01, 8.4743e-01, 6.4905e-01, - 3.2047e-02, 7.9299e-01, 4.4112e-02, 8.2065e-01, - 
6.4074e-01, 5.0950e-01, 3.9067e-01, 6.9817e-01, - 2.6907e-01, 1.1875e-01, 2.0775e-01, 8.1476e-01, - 6.4289e-01, 3.9998e-01, 1.7138e-01, 8.2458e-01, - 7.2667e-01, 8.8010e-02, 1.5554e-01, 5.5880e-01, - 4.8597e-01, 2.5555e-01, 4.1458e-02, 7.8508e-01, - 2.8802e-02, 5.5833e-01, 1.9918e-01, 2.7091e-01, - 6.0720e-02, 4.4126e-01, 3.7238e-01, 7.4795e-02, - 4.4719e-01, 9.3574e-01, 6.9669e-01, 3.3986e-01, - 5.9333e-01, 8.5560e-01, 6.1018e-02, 8.9694e-01, - 2.2288e-01, 8.3254e-01, 7.4048e-01, 1.0655e-01, - 6.1968e-01, 4.9236e-01, 3.9374e-01, 9.8600e-01, - 6.2727e-01, 6.3699e-01, 1.5077e-01, 6.8923e-01, - 8.1866e-02, 9.4491e-01, 4.2495e-01, 6.1924e-01, - 5.9097e-01, 8.9631e-01, 4.0896e-02, 2.7195e-01, - 9.2606e-01, 7.9629e-01, 1.3798e-01, 9.5497e-01, - 5.5609e-01, 7.6608e-01, 2.0744e-01, 9.0828e-01, - 7.3828e-01, 2.4178e-02, 6.3042e-01, 9.0157e-02, - 8.0575e-01, 6.4091e-01, 8.4001e-01, 8.0655e-01, - 6.3773e-01, 5.2165e-01, 7.3692e-01, 9.8339e-03, - 2.4304e-01, 4.0600e-01, 5.3472e-01, 7.3039e-01, - 1.8193e-01, 6.5386e-01, 8.9109e-01, 1.4270e-01, - 9.9755e-01, 1.9643e-01, 3.1731e-01, 8.4375e-01, - 8.4723e-01, 4.5911e-02, 4.7723e-01, 5.8049e-01, - 7.4714e-01, 9.5452e-01, 6.9020e-01, 7.3868e-01, - 8.9090e-01, 4.6800e-01, 1.2098e-02, 9.0383e-01, - 2.8584e-01, 8.4536e-02, 8.2007e-01, 4.0686e-01, - 3.2004e-01, 6.9340e-02, 9.6068e-01, 1.0858e-01, - 7.8312e-01, 5.4643e-02, 8.4437e-01, 6.9654e-02, - 9.7882e-01, 3.6535e-01, 7.6403e-01, 5.6204e-02, - 2.1405e-02, 8.8165e-01, 6.5928e-01, 7.1005e-01, - 6.2375e-01, 6.5806e-01, 3.5559e-02, 1.4669e-01, - 1.6843e-01, 9.0943e-01, 5.9699e-01, 3.9861e-01, - 7.9046e-01, 4.2964e-01, 3.2524e-01, 2.0212e-02, - 3.7890e-01, 5.4298e-01, 6.1412e-01, 3.4376e-01, - 3.9039e-01, 3.7773e-01, 5.0347e-01, 6.3176e-01, - 6.1923e-01, 2.6321e-01, 4.7552e-01, 3.5546e-01, - 4.9177e-01, 8.1631e-01, 6.5120e-01, 4.6596e-01, - 2.3883e-01, 3.7781e-01, 5.1278e-01, 4.6530e-02, - 9.2397e-02, 3.2990e-01, 5.7737e-03, 9.1643e-01, - 6.2213e-01, 9.8931e-01, 6.7022e-01, 9.9456e-01, - 6.7420e-01, 8.9914e-01, 6.2276e-01, 1.6743e-01, - 1.6278e-01, 3.2006e-01, 3.0192e-01, 6.6995e-01, - 4.7404e-01, 2.9640e-01, 1.2818e-01, 7.9687e-03, - 2.4825e-01, 4.3761e-01, 6.3144e-01, 1.5385e-02, - 1.6076e-01, 1.3500e-01, 6.2363e-01, 3.7179e-03, - 6.2808e-01, 7.7587e-01, 3.3617e-01, 2.8067e-01, - 4.8957e-01, 8.6733e-01, 3.2273e-01, 2.0147e-01, - 1.6166e-01, 1.4503e-01, 6.1851e-01, 7.6022e-02, - 9.5480e-01, 3.3003e-01, 7.4260e-01, 5.5179e-01, - 2.2819e-01, 8.9926e-01, 4.6642e-01, 9.1739e-01, - 2.8484e-01, 1.5083e-01, 7.3850e-01, 6.2898e-01, - 2.6437e-01, 8.2121e-02, 2.1812e-01, 5.9090e-01, - 5.1624e-01, 3.1861e-01, 6.4228e-01, 9.4735e-01, - 7.0057e-01, 6.7393e-01, 7.5735e-01, 5.6290e-01, - 8.6359e-01, 7.5982e-01, 8.9830e-01, 8.9589e-01, - 9.2167e-01, 1.6984e-01, 4.1872e-01, 7.0953e-01, - 5.8248e-01, 5.1128e-01, 4.9473e-01, 9.3201e-01, - 4.3207e-04, 4.4583e-01, 3.1218e-01, 7.2647e-01, - 2.1753e-01, 5.8396e-01, 9.6181e-01, 1.8968e-01, - 7.5732e-01, 7.7034e-01, 5.4270e-01, 2.5345e-01, - 1.1261e-01, 5.3952e-01, 6.7120e-01, 5.7430e-01, - 5.7452e-01, 1.9481e-01, 5.4907e-01, 9.8805e-01, - 5.5217e-01, 5.2533e-02, 3.2655e-01, 7.4265e-01, - 3.3929e-01, 3.8987e-01, 3.8084e-01, 8.2952e-01, - 9.8247e-01, 3.9827e-01, 5.2188e-01, 7.5684e-01, - 5.7018e-01, 7.8082e-01, 2.7279e-01, 8.5286e-01, - 5.6357e-01, 6.0478e-01, 2.6466e-01, 5.2700e-01, - 6.7922e-01, 7.6419e-01, 6.4983e-02, 9.9524e-01, - 1.8506e-01, 9.8193e-01, 8.5914e-01, 3.9608e-01, - 3.1767e-01, 7.5937e-01, 4.5263e-01, 8.2957e-01, - 7.3658e-01, 6.0163e-01, 5.0224e-01, 6.8388e-01, - 
7.1932e-01, 6.8113e-01, 6.8211e-01, 1.9733e-01, - 3.3945e-01, 1.4133e-01, 5.2321e-01, 6.5309e-01, - 7.3928e-01, 5.9226e-01, 8.6895e-01, 8.7555e-01, - 8.0389e-01, 9.7514e-01, 5.2371e-01, 1.9607e-01, - 6.4999e-01, 1.2679e-01, 3.1110e-01, 9.0763e-01, - 4.9753e-01, 2.3690e-01, 9.7634e-01, 2.0327e-02, - 8.6119e-02, 3.8622e-01, 7.5046e-01, 1.0498e-01, - 2.3977e-01, 3.3615e-01, 7.8112e-01, 7.5236e-01, - 6.1657e-01, 4.1513e-01, 1.7229e-01, 4.9383e-01, - 1.0446e-01, 3.4158e-01, 7.5812e-01, 2.7031e-01, - 3.2779e-01, 7.9353e-01, 4.1051e-01, 2.7920e-01, - 7.1389e-01, 6.6997e-01, 6.0140e-01, 4.3603e-01, - 3.5002e-01, 9.9051e-01, 3.2108e-01, 1.2518e-01, - 2.5630e-01, 2.6310e-01, 9.8697e-01, 9.2055e-01, - 4.7638e-01, 7.0146e-01, 5.3125e-01, 7.1807e-01, - 2.3959e-01, 8.4717e-01, 2.9144e-01, 9.0787e-01, - 5.0453e-01, 1.0290e-01, 3.7515e-01, 1.0833e-01, - 9.5140e-01, 5.2024e-01, 1.5227e-01, 2.3945e-01, - 5.9483e-01, 1.2835e-01, 4.1922e-01, 4.3056e-01, - 6.7601e-01, 9.9151e-01, 2.7772e-01, 6.2498e-01, - 1.4961e-01, 8.2116e-01, 6.3550e-01, 2.4633e-01, - 8.0063e-01, 9.0852e-01, 7.0469e-01, 5.0673e-02, - 6.8987e-01, 2.8266e-01, 4.0356e-01, 9.4879e-01, - 8.1800e-01, 4.3028e-01, 1.5262e-01, 8.6723e-01, - 5.5898e-01, 4.0008e-01, 4.1198e-02, 2.4922e-01, - 4.6623e-01, 5.9920e-01, 2.4748e-01, 5.3751e-01, - 7.9826e-01, 7.9340e-01, 5.4706e-01, 3.1348e-01, - 8.8152e-01, 3.6602e-01, 6.5734e-01, 7.5456e-01, - 1.8360e-01, 2.9506e-01, 7.2951e-01, 3.9081e-02, - 3.3378e-01, 6.2120e-01, 1.8041e-01, 9.1382e-01, - 6.9023e-01, 9.4426e-01, 8.6070e-01, 7.5081e-01, - 9.0051e-01, 3.0756e-01, 6.9718e-01, 7.5190e-01, - 7.2288e-01, 3.7386e-01, 2.2439e-01, 9.3179e-01, - 4.8208e-01, 7.0572e-02, 9.3952e-01, 5.0097e-01, - 3.9199e-01, 2.4114e-01, 9.2975e-01, 8.9070e-03, - 7.4881e-01, 8.4434e-01, 1.8233e-01, 9.3556e-02, - 4.5752e-02, 8.6701e-01, 5.5636e-01, 3.8475e-01, - 1.0103e-02, 2.1789e-01, 8.0072e-01, 6.3665e-01, - 1.3845e-01, 3.3886e-01, 5.0949e-01, 9.5029e-01, - 8.6700e-01, 8.1981e-01, 9.7561e-01, 6.3823e-01, - 4.8593e-01, 2.7508e-01, 7.3125e-01, 3.5563e-01, - 2.9572e-01, 6.7049e-01, 9.6207e-01, 4.3129e-01, - 3.9912e-01, 1.8416e-01, 4.2178e-01, 2.2591e-01, - 2.7116e-01, 8.4944e-02, 5.3846e-01, 4.9377e-01, - 1.8625e-01, 1.0443e-01, 4.0045e-01, 7.8568e-01, - 7.8932e-01, 5.1024e-02, 6.1194e-01, 9.9630e-01, - 4.1847e-01, 6.4383e-01, 8.7660e-01, 2.2864e-01, - 3.8631e-01, 8.6041e-02, 9.8976e-01, 9.6159e-01, - 1.5407e-01, 3.7114e-01, 8.6685e-01, 2.9952e-01, - 4.3743e-01, 6.0430e-01, 1.8023e-01, 8.9627e-01, - 3.4675e-01, 8.9111e-01, 4.9121e-01, 3.2340e-01, - 4.3401e-01, 4.5372e-01, 1.1315e-01, 2.6283e-01, - 4.1677e-02, 6.9109e-01, 8.3950e-01, 2.9105e-01, - 2.7116e-01, 3.2257e-01, 6.5006e-01, 1.3440e-01, - 8.7399e-01, 8.5258e-01, 5.1670e-01, 9.5831e-01, - 1.8248e-03, 2.2603e-01, 4.3010e-01, 4.4390e-01, - 3.6180e-01, 5.7994e-01, 5.4039e-02, 9.0240e-01, - 9.2264e-01, 8.4106e-01, 5.2452e-02, 4.9914e-01, - 3.6467e-01, 4.7270e-01, 7.1339e-02, 9.7767e-02, - 9.1457e-01, 5.0307e-01, 1.4224e-01, 3.1225e-01, - 7.0030e-01, 5.8456e-01, 3.0705e-01, 7.1438e-01, - 4.9225e-01, 6.4899e-01, 8.4726e-01, 9.9534e-01, - 7.4622e-01, 5.8818e-01, 6.4092e-01, 6.7998e-01, - 8.7179e-01, 8.2931e-01, 3.5227e-04, 1.6905e-03, - 3.5530e-01, 2.2770e-01, 2.8730e-01, 7.0847e-01, - 7.3922e-01, 1.5764e-01, 7.5910e-01, 8.1155e-01, - 3.5789e-01, 8.1604e-01, 5.7121e-01, 5.1344e-01, - 7.1259e-01, 4.3783e-02, 7.6839e-01, 1.7140e-01, - 7.6808e-01, 9.6939e-01, 5.0871e-01, 9.0454e-02, - 1.6264e-01, 9.4724e-01, 9.7794e-01, 3.7937e-01, - 4.3858e-01, 3.9250e-01, 6.5494e-01, 4.4660e-02, - 
9.5246e-01, 6.3661e-01, 4.8289e-02, 7.6435e-01, - 8.9623e-01, 8.6627e-01, 8.3806e-01, 6.3298e-01, - 4.4901e-01, 8.8890e-01, 1.1387e-01, 8.7033e-01, - 7.9632e-02, 6.7734e-01, 9.7420e-01, 3.5013e-01, - 9.7815e-01, 5.6389e-01, 8.9754e-01, 7.5863e-01, - 9.0599e-01, 4.9037e-01, 8.2672e-01, 9.3774e-02, - 9.2781e-01, 2.5121e-01, 8.2025e-01, 1.3401e-01, - 8.9350e-01, 5.7261e-01, 6.4828e-01, 1.5136e-01, - 3.1037e-02, 3.7618e-01, 8.0341e-02, 7.8249e-01, - 4.4596e-01, 7.3260e-01, 6.7366e-01, 8.7493e-01, - 8.2283e-01, 8.3144e-01, 7.4080e-01, 6.5985e-01, - 3.8350e-01, 6.8871e-01, 1.6347e-01, 3.2368e-01, - 5.8567e-01, 6.1090e-01, 5.5092e-01, 7.1963e-01, - 3.7645e-01, 2.1788e-01, 1.5348e-01, 3.8599e-01, - 6.2359e-01, 1.5142e-02, 9.9220e-01, 7.1255e-01, - 3.6554e-02, 7.4579e-01, 8.6648e-01, 4.8711e-01, - 3.1108e-01, 4.0288e-01, 4.0072e-02, 7.3039e-01, - 8.3462e-01, 9.6954e-01, 7.7647e-01, 7.6143e-01, - 9.4618e-01, 3.9950e-01, 8.5579e-01, 2.4883e-01, - 7.7346e-03, 6.6880e-01, 9.1827e-01, 2.9585e-01, - 1.3272e-01, 4.5063e-01, 9.6004e-01, 3.8617e-01, - 6.1488e-01, 8.9428e-01, 8.7533e-01, 5.4282e-01, - 4.6344e-01, 4.0858e-02, 4.6086e-01, 4.5823e-01, - 4.5897e-01, 8.6181e-01, 7.1824e-01, 4.2757e-01, - 2.8457e-01, 6.3509e-01, 3.3824e-02, 7.5136e-01, - 2.6126e-01, 4.6785e-01, 8.9734e-01, 4.5190e-01, - 5.7147e-01, 7.3131e-01, 6.2913e-01, 6.1694e-01, - 5.1423e-01, 1.7321e-01, 6.2877e-01, 1.7045e-01, - 4.5231e-02, 7.2188e-01, 1.6031e-01, 5.5732e-01, - 1.6212e-01, 6.8915e-01, 7.6515e-01, 2.1449e-01, - 8.6821e-01, 6.5088e-01, 1.4701e-02, 5.8673e-01, - 8.6510e-01, 1.8752e-01, 3.4821e-01, 7.8249e-05, - 9.0048e-01, 3.4917e-01, 8.7994e-01, 8.3745e-01, - 1.0882e-01, 2.6136e-01, 4.8219e-01, 9.8171e-01, - 4.1806e-01, 4.5685e-01, 8.3561e-01, 6.7487e-01, - 2.4726e-01, 8.3310e-01, 7.8277e-01, 5.0739e-01, - 3.8135e-01, 7.0854e-03, 5.7741e-01, 4.9996e-01, - 9.4008e-02, 9.9417e-01, 5.2042e-04, 2.7752e-01, - 3.1344e-01, 2.6242e-01, 6.5438e-02, 2.8291e-01, - 4.1667e-01, 6.2725e-01, 7.1390e-01, 2.4205e-01, - 7.5873e-01, 6.2084e-01, 7.6212e-02, 7.1245e-01, - 5.4023e-02, 7.7038e-01, 2.4446e-01, 8.3162e-01, - 5.6013e-01, 3.2106e-01, 3.3449e-01, 7.0203e-01, - 7.0579e-01, 8.9030e-02, 7.0165e-02, 9.1623e-01, - 5.2715e-01, 9.7322e-01, 9.7823e-01, 5.1098e-01, - 1.6600e-01, 6.8958e-01, 1.7375e-01, 7.4450e-01, - 8.9019e-01, 1.6373e-01, 3.6149e-01, 4.8630e-03, - 4.3784e-01, 8.1716e-02, 9.9846e-01, 2.7020e-01, - 3.0542e-01, 3.2143e-01, 1.1526e-01, 9.8921e-01, - 2.0663e-01, 3.1757e-01, 2.6436e-01, 4.8578e-01, - 7.4604e-01, 1.8078e-01, 2.3623e-01, 3.6711e-01, - 7.8926e-01, 2.8736e-01, 6.0619e-01, 3.1924e-01, - 1.0584e-01, 6.2344e-01, 8.4074e-01, 4.8546e-01, - 6.5987e-02, 6.5535e-01, 9.9584e-01, 1.3311e-01, - 6.4704e-01, 7.6017e-01, 9.3796e-01, 8.2183e-01, - 7.1555e-01, 6.8949e-01, 4.2039e-01, 5.0010e-01, - 9.3388e-02, 2.6961e-02, 3.5883e-01, 9.1810e-01, - 5.2881e-01, 7.1083e-01, 5.6422e-01, 7.4372e-01, - 7.6688e-01, 3.0736e-01, 9.1361e-01, 1.4268e-01, - 9.8265e-03, 2.8973e-01, 8.3844e-01, 4.8399e-01, - 5.9975e-01, 7.2048e-02, 5.0537e-01, 1.9415e-01, - 9.0142e-01, 1.6063e-01, 8.3747e-01, 7.7715e-01, - 9.3624e-01, 6.7023e-01, 7.8642e-01, 3.8946e-01, - 5.7739e-01, 4.0402e-01, 7.0018e-01, 6.4440e-01, - 6.8152e-01, 8.3599e-01, 1.0687e-01, 6.3174e-01, - 7.1109e-01, 1.1298e-02, 7.2255e-01, 3.1842e-01, - 3.0260e-01, 2.0738e-01, 3.1742e-01, 9.3670e-01, - 2.1424e-01, 4.7140e-01, 4.4421e-01, 8.5256e-01, - 3.8647e-01, 6.8511e-01, 2.1262e-01, 9.9373e-02, - 7.8022e-02, 2.0199e-01, 1.7345e-01, 7.2863e-01, - 4.7128e-01, 6.2733e-01, 6.0961e-01, 3.7460e-01, - 
2.1610e-01, 7.3730e-01, 8.5230e-01, 1.6917e-01, - 7.0643e-01, 9.5513e-01, 7.3051e-02, 9.8510e-01, - 2.0092e-01, 4.3241e-01, 8.0765e-01, 7.1129e-01, - 9.4627e-01, 1.8831e-01, 1.2066e-01, 2.5488e-01, - 2.6294e-01, 8.6045e-01, 6.7885e-01, 9.2268e-01, - 9.7165e-01, 3.8553e-02, 2.3898e-01, 3.6820e-01, - 3.1687e-01, 1.4802e-01, 1.1460e-01, 9.4054e-01, - 2.7835e-01, 8.3789e-01, 7.1509e-01, 6.6596e-02, - 7.0322e-01, 6.7972e-02, 7.9658e-01, 6.6196e-01, - 7.4672e-01, 9.2136e-01, 6.6804e-01, 3.4306e-01, - 4.8283e-01, 7.4549e-01, 5.5160e-01, 3.8359e-01, - 4.5479e-01, 2.3253e-01, 1.2656e-01, 4.0585e-02, - 5.6244e-01, 6.4034e-03, 9.0407e-01, 7.9786e-02, - 6.0034e-01, 6.5899e-01, 8.2659e-01, 1.3903e-01, - 5.4187e-01, 4.8715e-01, 9.9846e-01, 5.8032e-01, - 9.2359e-01, 9.8268e-01, 4.4520e-01, 5.6869e-01, - 7.0005e-03, 4.7278e-02, 2.7563e-01, 5.8058e-01, - 1.5187e-01, 1.5041e-01, 6.7326e-01, 5.1848e-01, - 8.4097e-01, 3.3985e-01, 8.7930e-01, 6.0871e-01, - 4.7442e-01, 6.2568e-01, 5.9426e-01, 5.8463e-01]), + col_indices=tensor([1350, 1465, 4190, 6900, 6571, 5844, 4736, 324, 9249, + 4549, 8900, 1195, 9063, 17, 7365, 9356, 2846, 1690, + 3749, 1888, 862, 8180, 9473, 3977, 5876, 6416, 6859, + 7325, 678, 7412, 524, 1679, 6675, 3544, 6761, 5863, + 1068, 1910, 8050, 5074, 3644, 5672, 2657, 2220, 3680, + 3869, 2170, 9920, 5472, 6846, 1556, 5671, 175, 5132, + 2577, 8845, 2796, 3794, 8679, 3242, 2471, 9643, 3149, + 1963, 477, 3306, 128, 7262, 8119, 314, 7239, 5180, + 7202, 2643, 4302, 4311, 1590, 7790, 3773, 8804, 9774, + 2553, 9496, 5566, 1143, 7175, 1004, 2781, 372, 2208, + 7381, 6760, 7287, 1604, 2915, 9765, 1879, 938, 8046, + 4870, 6940, 4820, 8392, 5340, 4182, 5114, 2023, 1770, + 6402, 82, 2384, 7877, 2701, 2498, 2104, 9483, 669, + 9528, 5633, 1059, 3421, 3906, 4248, 6650, 9824, 1201, + 6102, 134, 2120, 8662, 7792, 9525, 5975, 1053, 6589, + 2129, 3517, 7592, 8589, 4303, 8461, 4477, 5747, 151, + 4870, 4529, 3641, 7250, 392, 4557, 4767, 3949, 3212, + 6761, 5818, 2538, 1345, 6586, 9842, 9149, 9262, 9581, + 8332, 5787, 6867, 3334, 3929, 9915, 9165, 4571, 9626, + 7362, 4285, 9980, 8689, 7517, 3312, 4487, 2526, 8915, + 9807, 716, 901, 2067, 6046, 9001, 7911, 408, 148, + 1317, 5920, 3284, 7801, 7588, 8770, 6560, 348, 6988, + 8097, 3984, 3485, 3409, 9201, 5638, 6421, 3563, 3632, + 1452, 6631, 6346, 2233, 5327, 64, 3152, 8667, 5946, + 4510, 1417, 3779, 8867, 2905, 1369, 135, 8079, 9417, + 187, 7716, 9929, 5719, 3498, 2120, 8640, 5146, 9731, + 8710, 9996, 9692, 1206, 6217, 1310, 7099, 2614, 8044, + 1982, 4290, 9595, 7939, 9679, 9571, 7958, 1758, 6195, + 5533, 9545, 406, 917, 1357, 5574, 1085, 7923, 7220, + 6422, 8549, 2682, 40, 3795, 9767, 3767, 5240, 158, + 3048, 7355, 4332, 6816, 4937, 8246, 9715, 8200, 9814, + 6924, 8065, 6446, 6243, 4976, 4917, 5512, 4910, 3708, + 6023, 7639, 1133, 6448, 4950, 8492, 7993, 8584, 1340, + 5468, 6297, 6716, 8997, 6226, 6910, 7244, 6616, 1449, + 5171, 5393, 8543, 2497, 1323, 5505, 4755, 5970, 388, + 2929, 385, 1335, 7880, 9728, 5434, 6161, 2897, 3622, + 6161, 1650, 8903, 6820, 7357, 9843, 3756, 4645, 5310, + 1445, 7447, 1450, 6550, 1861, 6021, 3492, 1610, 5698, + 4827, 6115, 5157, 1608, 4965, 737, 1903, 4025, 2119, + 6552, 595, 4810, 130, 3982, 137, 8138, 5899, 7598, + 5942, 3724, 5818, 6843, 8546, 4893, 4811, 391, 3514, + 9590, 3749, 2481, 8733, 9761, 9190, 5512, 9765, 134, + 8737, 2194, 9046, 3630, 3577, 1253, 9106, 1281, 5455, + 2534, 8025, 1117, 9647, 9761, 3834, 8587, 9342, 4191, + 9213, 3915, 2071, 2631, 849, 3316, 4207, 5597, 9512, + 7014, 9934, 1359, 6351, 1942, 6581, 6730, 6231, 5782, 
+ 5904, 9677, 532, 7482, 4382, 7965, 9754, 4297, 5473, + 3176, 7459, 9266, 9112, 9056, 5480, 4330, 3498, 3249, + 887, 2323, 930, 7809, 58, 2094, 769, 5008, 5797, + 6898, 9453, 808, 7053, 398, 7426, 1736, 634, 7990, + 3100, 1720, 4719, 4760, 7828, 648, 5462, 3365, 7079, + 7345, 2413, 8136, 6049, 5463, 2961, 6046, 4524, 6494, + 9373, 3832, 7595, 4732, 9656, 3425, 2265, 3526, 7277, + 1225, 8233, 9557, 960, 3905, 1045, 4119, 8053, 8659, + 1372, 8807, 1493, 4002, 9729, 3897, 7672, 5249, 1203, + 6003, 5841, 3864, 7129, 5663, 2654, 493, 7397, 7287, + 8513, 5254, 1707, 4926, 5022, 9139, 5676, 8296, 5115, + 9657, 591, 3536, 5723, 4822, 5176, 7346, 8375, 9143, + 2356, 7880, 2943, 6276, 2137, 3628, 59, 290, 6930, + 2063, 7839, 1196, 5604, 6563, 8782, 9536, 5108, 3877, + 429, 6184, 7060, 3523, 3867, 5457, 5143, 8932, 5594, + 5593, 2868, 1998, 3179, 1071, 9873, 6395, 5298, 9745, + 5572, 1722, 4856, 9429, 1499, 3117, 6370, 8257, 4390, + 3738, 9994, 4823, 6585, 1467, 2601, 5458, 2716, 1146, + 1738, 33, 9872, 5036, 1557, 1944, 2879, 4044, 2272, + 7676, 6388, 1904, 6101, 7987, 1700, 4395, 343, 9841, + 1734, 7922, 7616, 779, 5161, 468, 8805, 3563, 3932, + 1112, 1437, 1123, 8651, 9745, 3719, 4795, 7772, 9403, + 4517, 7987, 9197, 2190, 6957, 7720, 6929, 726, 1110, + 9372, 5858, 5956, 8878, 9550, 1968, 4362, 1304, 9796, + 7292, 3154, 8295, 3865, 7777, 3161, 3378, 1822, 7853, + 208, 589, 1972, 9911, 1971, 862, 9849, 3414, 9130, + 579, 4654, 92, 1475, 5598, 8930, 3936, 5479, 4313, + 185, 4782, 8779, 9083, 418, 1106, 2313, 6235, 7286, + 152, 2775, 6751, 650, 4919, 8921, 9634, 1984, 5779, + 1783, 7443, 6754, 7424, 1444, 6666, 409, 7791, 2642, + 1544, 5312, 3341, 5058, 6924, 1677, 5851, 5827, 2727, + 2075, 6264, 3079, 359, 2843, 330, 8651, 3205, 9111, + 3779, 6781, 64, 3792, 2666, 6804, 8462, 1474, 2290, + 1836, 9949, 6173, 4550, 9201, 1274, 5067, 351, 6480, + 6711, 8407, 7904, 5366, 4929, 444, 108, 2997, 7484, + 8340, 6161, 8073, 6104, 8943, 694, 6149, 8601, 1196, + 6575, 9937, 5094, 7925, 5722, 4565, 1084, 8988, 2783, + 7886, 6397, 7800, 2120, 9277, 5526, 5542, 1074, 1789, + 5085, 3061, 759, 7623, 4901, 1186, 3367, 1888, 9959, + 2396, 9680, 360, 1868, 6943, 9939, 5200, 5990, 7250, + 6064, 4697, 2291, 9159, 5133, 7533, 58, 7263, 8045, + 653, 6028, 7381, 932, 5857, 3072, 5547, 6647, 6032, + 5013, 1366, 1918, 6452, 4364, 581, 9863, 1502, 2117, + 5639, 4065, 1307, 3603, 5711, 481, 9225, 4458, 5133, + 5766, 2525, 6579, 6101, 955, 4014, 6750, 3371, 125, + 7603, 502, 2892, 6603, 2344, 8601, 3165, 2311, 9192, + 9546, 5043, 7844, 8591, 4935, 1765, 6671, 3193, 7181, + 2586, 5711, 3133, 5348, 7042, 9417, 8381, 100, 9386, + 3078, 5274, 1139, 6265, 8146, 6636, 5554, 6892, 1948, + 4479, 3693, 6919, 173, 9040, 5511, 2922, 6123, 9263, + 1418, 7264, 9857, 4588, 2740, 3894, 934, 1897, 9124, + 2393, 5100, 6701, 4211, 7855, 8655, 7893, 6373, 880, + 3410, 9500, 817, 9700, 622, 8424, 8906, 1192, 8082, + 5342, 4260, 2298, 5844, 8318, 1796, 365, 9287, 9942, + 1981, 5532, 9956, 1644, 1875, 1279, 1792, 9840, 4785, + 2478, 8953, 6214, 7060, 7798, 5291, 3255, 4370, 6301, + 9417, 5935, 5835, 7402, 7682, 5438, 9081, 5509, 8833, + 4566, 7011, 7184, 9706, 5339, 6813, 8443, 6535, 4377, + 9556, 509, 7923, 1426, 269, 4969, 957, 9645, 915, + 2300, 7682, 2902, 3368, 6024, 5901, 8615, 9741, 4861, + 1715, 1852, 8544, 3363, 7624, 4922, 8831, 1155, 5064, + 5765, 8030, 5214, 394, 5236, 4059, 7423, 941, 4185, + 7107, 525, 2019, 9115, 9184, 9216, 3622, 6792, 7696, + 6453, 9771, 541, 5255, 3317, 5383, 7392, 5897, 2023, + 6649, 4678, 2681, 6438, 365, 8269, 
3577, 3723, 8264, + 2108]), + values=tensor([4.4965e-01, 1.3769e-01, 9.5035e-01, 5.7293e-01, + 7.7954e-01, 9.7503e-01, 1.4322e-01, 1.1818e-01, + 9.2953e-01, 8.1415e-01, 2.5238e-01, 6.0614e-01, + 4.5041e-01, 9.3390e-01, 9.7710e-01, 9.9499e-01, + 6.9516e-01, 1.2549e-01, 6.3390e-02, 4.9786e-01, + 2.6705e-01, 8.4035e-01, 4.7765e-01, 3.0191e-01, + 6.1763e-01, 6.4828e-01, 3.9165e-01, 4.0915e-01, + 4.2696e-01, 5.5891e-01, 9.9986e-01, 6.3730e-01, + 2.9036e-01, 6.8686e-01, 6.5946e-01, 4.8732e-01, + 3.4969e-01, 4.5554e-01, 8.7839e-01, 2.7698e-01, + 3.7226e-01, 1.5065e-02, 7.8865e-01, 6.0802e-01, + 5.6511e-01, 2.4244e-01, 5.1242e-02, 4.0738e-01, + 9.8710e-01, 1.9007e-01, 5.9800e-01, 8.4842e-01, + 9.1312e-01, 5.7446e-01, 5.4363e-01, 8.4750e-01, + 7.2541e-01, 8.4173e-01, 2.0399e-01, 4.5103e-01, + 1.4989e-01, 4.0298e-02, 1.7179e-01, 1.9945e-01, + 4.2389e-01, 3.1067e-01, 4.1838e-01, 4.7559e-01, + 8.5569e-01, 1.8368e-01, 4.8212e-01, 2.5203e-01, + 9.8680e-01, 4.5715e-01, 3.9215e-01, 8.5828e-01, + 9.4023e-01, 6.4093e-01, 7.0762e-01, 9.1246e-02, + 1.3960e-01, 8.7422e-01, 2.0504e-01, 5.2817e-01, + 4.5923e-02, 7.5340e-01, 4.3166e-02, 2.0079e-01, + 7.8776e-01, 7.4413e-01, 8.2810e-01, 4.3091e-02, + 6.3484e-01, 9.8935e-01, 4.7099e-01, 7.2918e-01, + 5.9483e-01, 8.6295e-01, 7.3936e-01, 3.6303e-01, + 5.6024e-01, 7.7622e-02, 1.0867e-01, 4.7041e-01, + 8.9265e-02, 9.1467e-01, 5.3849e-01, 1.8243e-01, + 5.3073e-01, 1.6104e-01, 5.5963e-01, 7.7433e-01, + 6.2087e-01, 2.9998e-01, 8.4995e-01, 2.2873e-02, + 8.7271e-02, 5.9852e-02, 6.5516e-01, 8.3316e-01, + 6.1547e-01, 6.3866e-01, 3.5342e-02, 9.7500e-01, + 4.1725e-01, 8.5943e-02, 3.3098e-02, 2.9393e-01, + 1.5172e-01, 6.5877e-01, 2.8169e-01, 8.0348e-01, + 7.7073e-01, 8.8877e-02, 6.6121e-01, 5.8345e-01, + 2.0661e-01, 3.7722e-01, 7.1221e-01, 4.5307e-01, + 3.1638e-01, 3.0489e-01, 1.1920e-01, 4.1591e-01, + 8.9518e-01, 5.0347e-01, 5.0858e-01, 3.2186e-01, + 7.4868e-01, 6.7171e-01, 9.3244e-01, 5.1430e-01, + 5.5016e-01, 2.8409e-01, 8.7062e-01, 2.5604e-01, + 9.6683e-01, 6.2006e-01, 8.0527e-02, 8.3669e-01, + 4.1226e-01, 3.0537e-01, 8.3561e-01, 3.9863e-01, + 5.3343e-01, 7.4499e-01, 4.3410e-01, 2.7897e-02, + 9.3765e-01, 8.4855e-01, 1.2336e-01, 1.1406e-01, + 3.0397e-01, 5.7188e-01, 3.3977e-01, 5.2731e-01, + 9.7160e-01, 3.2639e-01, 4.2242e-01, 3.2683e-02, + 9.8412e-01, 1.9910e-01, 2.5882e-01, 5.8518e-01, + 6.3761e-01, 7.2072e-01, 8.3748e-01, 2.4796e-02, + 9.0469e-01, 3.8373e-01, 8.3620e-01, 9.4335e-02, + 4.4129e-01, 4.0587e-01, 7.4687e-01, 5.2634e-01, + 1.3717e-01, 6.0904e-01, 4.2155e-01, 4.1423e-01, + 3.9847e-01, 8.8079e-02, 1.4881e-01, 3.8816e-01, + 5.2840e-02, 9.9597e-01, 3.7101e-01, 6.4058e-01, + 2.2169e-01, 9.4454e-01, 7.6950e-01, 1.2550e-01, + 3.3727e-02, 1.9034e-01, 7.9984e-01, 1.3267e-01, + 5.6674e-01, 3.2196e-01, 8.4859e-01, 4.3616e-01, + 5.3671e-02, 4.6213e-01, 3.4971e-01, 9.0445e-01, + 6.7961e-02, 6.2929e-01, 4.7567e-01, 1.9572e-01, + 9.0426e-01, 9.0016e-01, 4.5447e-01, 2.6630e-01, + 4.9915e-01, 4.7508e-01, 9.2903e-01, 8.0348e-01, + 5.8895e-01, 5.5352e-01, 2.2582e-01, 5.7060e-01, + 8.1496e-01, 4.4092e-01, 7.1190e-01, 9.3405e-01, + 4.8151e-01, 4.4619e-02, 5.0619e-01, 6.3555e-02, + 1.1898e-01, 9.5625e-01, 4.3337e-02, 8.4608e-01, + 5.4897e-01, 7.6274e-01, 5.6999e-01, 5.3584e-01, + 9.6706e-01, 4.7222e-01, 6.2173e-01, 2.9964e-01, + 5.3674e-01, 4.0259e-01, 2.0631e-01, 7.9009e-01, + 5.8390e-01, 3.0329e-01, 4.0575e-01, 1.9726e-01, + 4.3732e-01, 6.0232e-01, 4.7348e-01, 1.9995e-01, + 5.4697e-01, 8.5390e-01, 9.9359e-01, 6.6034e-01, + 3.4418e-01, 8.4510e-02, 8.6703e-01, 2.9267e-01, + 
2.0810e-03, 7.2851e-01, 6.6168e-01, 6.6317e-01, + 1.1115e-01, 6.0774e-03, 6.7142e-01, 1.1584e-01, + 7.1563e-01, 1.7755e-01, 5.1168e-01, 9.3821e-02, + 8.1249e-01, 1.8413e-01, 3.5159e-01, 2.4573e-01, + 4.2074e-01, 7.3776e-01, 5.3183e-01, 8.0631e-01, + 7.7750e-01, 5.5525e-01, 2.8671e-01, 6.1549e-01, + 7.1549e-01, 9.9895e-01, 5.2674e-01, 7.3041e-01, + 6.6176e-01, 3.6282e-01, 7.6034e-01, 4.0504e-01, + 2.0401e-01, 2.5487e-01, 1.9033e-01, 1.9623e-01, + 3.6284e-02, 4.2903e-01, 9.9371e-01, 6.3885e-01, + 2.5968e-02, 4.8586e-01, 6.6560e-01, 2.0616e-01, + 3.1688e-01, 6.5113e-01, 1.4279e-01, 2.1929e-01, + 5.0027e-01, 4.5512e-01, 7.2149e-01, 9.4504e-01, + 1.7168e-01, 2.2541e-01, 8.0417e-01, 3.4956e-01, + 8.3679e-01, 1.7641e-01, 9.6793e-01, 5.6210e-01, + 3.9701e-01, 1.3797e-01, 8.9202e-01, 3.2792e-01, + 4.6708e-03, 5.7283e-01, 6.0612e-01, 1.8272e-01, + 3.6646e-01, 7.2284e-01, 1.8383e-01, 2.9022e-01, + 7.9856e-01, 1.1343e-01, 2.3634e-01, 7.7130e-01, + 6.8456e-01, 1.7480e-01, 4.6250e-01, 2.4163e-01, + 8.8702e-01, 2.3058e-01, 5.1560e-02, 4.7027e-01, + 3.3772e-03, 7.2097e-02, 8.7375e-01, 4.0896e-01, + 2.9561e-01, 6.5640e-01, 5.7376e-01, 4.6184e-01, + 1.1663e-03, 9.9713e-01, 3.9355e-01, 8.6592e-01, + 6.8146e-02, 5.3923e-01, 9.7404e-01, 5.4723e-01, + 7.0125e-01, 5.4053e-01, 7.3082e-01, 9.7785e-01, + 3.3078e-02, 6.3749e-01, 6.2345e-01, 8.3051e-01, + 2.7383e-01, 3.6590e-01, 7.1021e-02, 6.9549e-01, + 7.7323e-03, 3.9891e-01, 6.7410e-01, 3.3211e-01, + 6.5325e-01, 3.6018e-01, 6.0362e-01, 8.1452e-01, + 9.4481e-01, 5.7955e-02, 4.9542e-01, 2.6184e-01, + 9.7487e-01, 4.9456e-01, 8.7892e-01, 6.6865e-01, + 5.8486e-01, 2.1761e-01, 8.5696e-01, 5.0919e-01, + 5.9215e-01, 3.5729e-01, 1.3459e-02, 1.6525e-01, + 7.5009e-03, 1.0552e-02, 5.5984e-01, 3.7025e-01, + 3.7418e-01, 8.5195e-01, 7.4716e-01, 6.0013e-01, + 4.7662e-01, 8.2335e-01, 6.4900e-01, 8.0351e-01, + 2.5588e-01, 1.9713e-02, 6.8762e-01, 5.8259e-01, + 3.1885e-01, 3.6384e-01, 8.9646e-01, 7.1272e-01, + 2.8574e-01, 3.1895e-02, 2.6414e-03, 9.8227e-02, + 9.4357e-02, 9.7996e-01, 4.3905e-02, 2.3021e-01, + 7.1295e-01, 6.1474e-01, 7.0395e-01, 5.6433e-01, + 7.0563e-01, 4.9830e-01, 1.9892e-01, 8.6877e-01, + 6.0980e-01, 4.8155e-02, 6.3296e-01, 5.8453e-01, + 1.9986e-01, 3.3112e-01, 4.0151e-01, 5.4000e-01, + 1.8111e-01, 1.3640e-01, 8.8587e-01, 3.0679e-01, + 6.3513e-01, 7.3512e-01, 7.9733e-01, 4.2402e-01, + 1.6855e-01, 6.3117e-01, 4.8672e-01, 1.8039e-01, + 3.3868e-01, 2.7695e-01, 8.9797e-01, 4.2602e-01, + 6.0119e-01, 6.6892e-01, 4.7692e-01, 9.6219e-01, + 8.0484e-03, 6.4306e-01, 5.9041e-01, 8.0576e-01, + 9.0822e-01, 1.9501e-01, 4.1124e-01, 7.9817e-01, + 9.1499e-01, 9.9412e-01, 5.3613e-01, 4.8755e-01, + 3.6868e-01, 6.5275e-01, 8.9945e-01, 4.7785e-01, + 2.3655e-01, 6.0433e-01, 1.1690e-01, 6.1542e-01, + 7.9383e-01, 1.1610e-01, 3.0181e-01, 2.1347e-01, + 3.3610e-01, 2.0677e-01, 7.2341e-03, 6.2776e-01, + 6.5419e-01, 9.4441e-01, 8.8461e-04, 5.3031e-01, + 4.1369e-01, 9.4148e-01, 3.5549e-01, 4.1555e-01, + 5.8924e-01, 3.9711e-02, 6.9593e-02, 9.8539e-01, + 5.4075e-01, 4.7812e-01, 8.5548e-01, 8.1065e-01, + 6.5941e-01, 6.6644e-01, 9.6480e-01, 5.4150e-01, + 3.9903e-01, 4.8873e-01, 3.2687e-01, 9.4824e-01, + 5.1894e-01, 5.8274e-01, 1.6827e-01, 5.5825e-01, + 8.2071e-01, 1.6344e-01, 5.2556e-01, 1.1193e-01, + 5.0648e-01, 3.6618e-01, 9.5810e-01, 1.2800e-01, + 1.3206e-03, 8.7762e-01, 2.8271e-01, 1.0213e-01, + 5.9400e-01, 3.9522e-01, 9.9260e-01, 7.4976e-01, + 2.6447e-01, 7.7409e-01, 4.1362e-01, 2.2793e-01, + 2.8623e-01, 9.4784e-01, 1.8629e-01, 5.5902e-01, + 9.8596e-01, 4.8008e-01, 2.9452e-01, 1.9370e-01, + 
3.6762e-01, 8.4825e-01, 3.2358e-01, 2.2154e-01, + 5.2395e-01, 6.2301e-01, 4.6454e-01, 1.9464e-01, + 8.7654e-01, 6.1940e-01, 9.7699e-01, 3.6808e-01, + 5.8060e-01, 9.3912e-01, 9.3195e-01, 1.9902e-01, + 5.0862e-01, 6.4487e-01, 2.7873e-01, 8.0248e-02, + 7.8608e-01, 4.3327e-01, 7.4604e-01, 3.0959e-01, + 3.9353e-01, 9.8239e-01, 7.6245e-01, 9.2901e-01, + 4.6128e-01, 3.6167e-01, 9.0955e-01, 3.1202e-01, + 4.4085e-02, 6.2633e-01, 8.7656e-01, 7.4675e-01, + 2.9172e-01, 6.1556e-01, 3.1529e-01, 3.4902e-01, + 5.4348e-01, 4.2744e-01, 9.8776e-01, 8.2751e-01, + 5.7028e-01, 6.7691e-01, 3.1950e-01, 7.9881e-01, + 8.9268e-01, 2.2836e-01, 7.5059e-01, 5.6039e-02, + 8.4118e-01, 6.5572e-01, 5.5163e-01, 3.1458e-01, + 6.1704e-01, 5.6093e-02, 7.9767e-01, 9.4600e-01, + 2.1928e-01, 3.9326e-02, 5.6560e-01, 7.8186e-01, + 8.9823e-01, 6.0816e-01, 4.7673e-01, 4.3366e-01, + 9.1734e-01, 5.8754e-01, 9.5818e-01, 5.0338e-01, + 4.7101e-01, 4.2574e-01, 8.8347e-01, 9.9629e-01, + 3.7466e-01, 9.2046e-01, 1.9630e-01, 5.6342e-01, + 3.9690e-01, 3.8488e-01, 7.6991e-01, 4.0605e-01, + 8.9007e-01, 6.1929e-01, 7.5840e-01, 3.6900e-01, + 7.9131e-01, 4.8074e-01, 2.5482e-01, 3.6580e-01, + 6.8986e-01, 1.9103e-01, 8.0298e-01, 5.5040e-01, + 6.0138e-01, 2.4459e-01, 1.8774e-02, 7.6177e-01, + 1.9747e-01, 7.4636e-01, 2.7241e-01, 6.3384e-01, + 6.8666e-01, 5.8283e-01, 2.4638e-01, 4.5053e-01, + 7.7113e-02, 9.6400e-01, 7.5084e-01, 9.9097e-01, + 8.2369e-01, 1.2518e-01, 8.7079e-01, 8.8731e-01, + 6.6279e-01, 2.0421e-01, 8.1221e-02, 9.8411e-01, + 2.6106e-01, 2.3338e-02, 2.6480e-02, 4.0599e-01, + 2.9908e-01, 4.8339e-01, 7.8936e-01, 9.9101e-01, + 9.0942e-01, 2.0788e-01, 2.2735e-01, 8.7072e-01, + 5.0509e-01, 2.3135e-01, 4.7407e-01, 7.4017e-01, + 4.9692e-01, 3.6074e-01, 4.9514e-02, 1.2661e-01, + 9.7856e-02, 2.2153e-01, 1.5779e-02, 8.8678e-01, + 9.8514e-01, 7.5711e-01, 2.2177e-01, 2.4731e-01, + 8.5599e-02, 7.0071e-01, 8.3437e-01, 6.5447e-01, + 2.5607e-01, 4.0526e-01, 4.8411e-01, 6.8148e-01, + 6.9610e-01, 3.9874e-01, 4.8623e-01, 8.7722e-01, + 2.8142e-01, 2.9443e-01, 8.8637e-01, 1.8955e-01, + 3.6086e-02, 1.6641e-01, 7.8338e-01, 4.6497e-01, + 1.4233e-02, 5.7884e-01, 3.3947e-01, 4.7233e-02, + 4.2210e-02, 2.4198e-01, 7.7171e-01, 7.9947e-01, + 9.6589e-01, 6.1429e-01, 1.2277e-01, 2.0682e-01, + 3.4285e-01, 7.9675e-01, 2.9050e-01, 7.8865e-01, + 7.5171e-01, 4.5477e-01, 5.8210e-01, 7.8888e-01, + 9.8955e-02, 2.5763e-01, 8.5833e-01, 3.0164e-01, + 3.9566e-01, 6.7231e-01, 3.3608e-01, 6.6018e-01, + 1.3947e-01, 2.1432e-01, 8.3950e-01, 8.7634e-02, + 8.7303e-02, 7.9920e-01, 2.1686e-01, 2.3929e-01, + 6.2304e-01, 2.5707e-01, 4.4665e-01, 6.8967e-01, + 6.9303e-01, 2.2866e-01, 9.2399e-01, 5.7817e-01, + 5.2984e-01, 3.2154e-01, 7.8238e-01, 4.0391e-01, + 9.0909e-01, 4.9607e-02, 5.2078e-01, 2.3283e-01, + 4.9465e-01, 6.1527e-01, 4.3228e-01, 9.0330e-01, + 8.0827e-01, 6.9746e-01, 9.6769e-01, 9.2643e-01, + 9.3397e-01, 3.8602e-01, 6.5755e-01, 8.2635e-01, + 1.0202e-01, 1.5856e-01, 1.2288e-01, 8.4879e-02, + 3.2458e-01, 2.9655e-01, 1.7742e-01, 9.3450e-01, + 1.0076e-01, 7.2645e-01, 8.1806e-02, 4.6658e-01, + 3.8093e-01, 3.3309e-01, 6.5422e-01, 2.5819e-01, + 1.5107e-01, 8.8510e-01, 4.3293e-01, 9.2351e-01, + 8.9183e-01, 3.2430e-01, 4.3852e-01, 8.7239e-01, + 3.5201e-01, 4.2486e-01, 7.0879e-01, 6.1583e-01, + 1.1565e-01, 7.0478e-01, 8.8022e-01, 2.5408e-01, + 7.3129e-02, 8.2514e-01, 8.2613e-01, 3.2247e-01, + 3.3085e-01, 8.9530e-01, 1.0707e-01, 4.3826e-01, + 7.8570e-01, 9.4198e-01, 7.2354e-01, 3.4066e-01, + 7.3480e-01, 4.2036e-01, 6.3356e-01, 2.9797e-01, + 8.2827e-01, 7.8339e-01, 9.0276e-01, 2.0607e-01, + 
9.7252e-01, 7.1435e-01, 4.2314e-01, 9.8891e-01, + 6.9301e-01, 5.7219e-01, 9.0386e-01, 7.0463e-01, + 7.1908e-04, 5.4692e-01, 6.0821e-01, 9.8439e-01, + 2.8373e-01, 4.8915e-01, 5.5931e-01, 9.7619e-01, + 7.4567e-01, 7.0938e-01, 1.2593e-01, 6.1318e-01, + 8.1283e-01, 9.8667e-02, 3.3716e-02, 5.7522e-01, + 1.9639e-01, 2.9416e-01, 5.5823e-01, 1.8549e-01, + 3.1942e-01, 7.4665e-01, 7.2327e-01, 9.5305e-01, + 1.9036e-02, 5.3955e-01, 2.4116e-01, 9.2667e-01, + 1.4653e-01, 9.8715e-01, 5.3480e-01, 6.0516e-01, + 6.0680e-01, 8.5987e-01, 3.9984e-01, 3.1004e-01, + 8.8724e-01, 7.4026e-01, 4.4687e-01, 8.9641e-01, + 2.4418e-01, 2.4025e-02, 5.5509e-01, 8.9647e-01, + 1.9417e-01, 6.6319e-01, 5.1484e-02, 3.4897e-01, + 5.1031e-01, 3.0611e-01, 3.8996e-01, 3.8358e-01, + 6.9904e-01, 6.9426e-01, 7.9483e-01, 3.3074e-01, + 4.4849e-01, 4.5771e-01, 3.3817e-01, 9.0376e-01, + 2.9871e-01, 1.3521e-01, 4.3356e-01, 8.7768e-02, + 2.4144e-01, 7.4490e-01, 5.2568e-01, 6.6800e-01, + 6.9455e-01, 8.8174e-01, 2.7533e-01, 9.6499e-01, + 9.5226e-01, 6.3027e-01, 6.0446e-02, 2.4209e-01, + 8.6906e-01, 3.5261e-01, 1.4614e-01, 9.4982e-01, + 7.0784e-02, 4.6539e-01, 8.8096e-01, 6.3553e-01, + 5.2585e-01, 6.7815e-02, 6.7186e-01, 7.0013e-01, + 3.2879e-01, 8.4313e-01, 2.0230e-01, 6.7661e-01, + 2.5127e-02, 8.3948e-01, 7.1261e-01, 9.8116e-01, + 5.7618e-01, 7.3962e-01, 4.1140e-01, 1.7002e-01, + 2.9786e-02, 6.1256e-01, 2.2368e-01, 2.3720e-01, + 5.1041e-01, 5.8688e-01, 3.2746e-01, 3.0206e-01, + 4.6125e-01, 3.9820e-01, 9.6772e-01, 2.2109e-01, + 6.7044e-01, 9.0422e-02, 7.0940e-01, 4.4105e-01, + 8.1398e-01, 1.1710e-01, 4.8937e-02, 6.8242e-02, + 2.0881e-01, 5.1602e-01, 9.9962e-01, 5.4247e-01, + 2.9660e-01, 5.2390e-01, 5.7505e-01, 8.5464e-01, + 9.4683e-01, 8.0727e-01, 2.3938e-01, 5.1948e-01, + 4.7982e-01, 5.9710e-01, 1.9899e-01, 5.7719e-01, + 9.9101e-01, 8.2375e-01, 4.2012e-01, 4.5169e-01, + 4.0205e-02, 5.1058e-03, 5.9797e-01, 3.2629e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.6283, 0.6554, 0.1926, ..., 0.5716, 0.9993, 0.6492]) +tensor([0.5424, 0.9332, 0.7035, ..., 0.9872, 0.5484, 0.9353]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1509,13 +1295,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.376285076141357 seconds +Time: 10.641618490219116 seconds -[20.4, 20.32, 20.32, 20.44, 20.32, 20.32, 20.52, 20.6, 20.96, 21.36] -[21.4, 21.52, 21.84, 22.84, 24.24, 24.88, 25.24, 25.24, 24.84, 24.56, 23.72, 23.8, 23.96, 23.68] -14.664767265319824 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 145400, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.376285076141357, 'TIME_S_1KI': 0.07136372129395707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.9616888427734, 'W': 22.159348523127505} -[20.4, 20.32, 20.32, 20.44, 20.32, 20.32, 20.52, 20.6, 20.96, 21.36, 20.68, 20.64, 20.6, 20.52, 20.32, 20.28, 20.32, 20.32, 20.6, 20.68] -368.96000000000004 -18.448 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 145400, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.376285076141357, 'TIME_S_1KI': 0.07136372129395707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.9616888427734, 'W': 22.159348523127505, 'J_1KI': 2.2349497169379187, 'W_1KI': 0.15240267209853856, 'W_D': 3.711348523127505, 'J_D': 54.42606233215331, 
'W_D_1KI': 0.02552509300637899, 'J_D_1KI': 0.00017555084598610036} +[21.44, 21.48, 21.48, 21.12, 21.2, 21.24, 21.16, 21.48, 21.6, 21.92] +[21.92, 21.88, 24.96, 26.8, 28.08, 28.64, 29.24, 29.24, 26.08, 24.52, 23.6, 23.56, 23.36, 23.36] +14.664376258850098 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 142368, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.641618490219116, 'TIME_S_1KI': 0.07474726406368788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.11982940673823, 'W': 23.739150118754246} +[21.44, 21.48, 21.48, 21.12, 21.2, 21.24, 21.16, 21.48, 21.6, 21.92, 20.56, 20.56, 20.2, 20.2, 20.08, 20.04, 20.44, 20.72, 20.72, 21.12] +376.24 +18.812 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 142368, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.641618490219116, 'TIME_S_1KI': 0.07474726406368788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.11982940673823, 'W': 23.739150118754246, 'J_1KI': 2.445211209026876, 'W_1KI': 0.16674498566218704, 'W_D': 4.927150118754245, 'J_D': 72.25358322525018, 'W_D_1KI': 0.03460855050821986, 'J_D_1KI': 0.00024309220125463487} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json index c0e7e6f..7ae266f 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 98.37349367141724, "TIME_S_1KI": 98.37349367141724, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2530.5661976623533, "W": 23.87864246176097, "J_1KI": 2530.5661976623533, "W_1KI": 23.87864246176097, "W_D": 5.392642461760968, "J_D": 571.491396617889, "W_D_1KI": 5.392642461760968, "J_D_1KI": 5.392642461760968} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 101.81685495376587, "TIME_S_1KI": 101.81685495376587, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2509.431913032532, "W": 23.91615643501538, "J_1KI": 2509.431913032532, "W_1KI": 23.91615643501538, "W_D": 5.455156435015379, "J_D": 572.3889491109848, "W_D_1KI": 5.455156435015379, "J_D_1KI": 5.455156435015379} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output index 3a0a0f2..06c1b57 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 500000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 98.37349367141724} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 101.81685495376587} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 6, ..., 2499994, +tensor(crow_indices=tensor([ 0, 6, 8, ..., 2499992, 2499997, 2500000]), - col_indices=tensor([ 13104, 56490, 58201, ..., 30329, 136735, - 267614]), - values=tensor([0.2415, 0.0022, 0.5702, ..., 0.5534, 0.4567, 0.6374]), + col_indices=tensor([ 7138, 74289, 101345, ..., 58125, 215534, + 230533]), + values=tensor([0.6785, 0.9079, 0.1725, ..., 0.1754, 0.6680, 0.6302]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5993, 0.2850, 0.9957, ..., 0.8791, 0.8991, 0.2848]) +tensor([0.7654, 0.8855, 0.1287, ..., 0.1047, 0.9719, 0.8120]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 98.37349367141724 seconds +Time: 101.81685495376587 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 6, ..., 2499994, +tensor(crow_indices=tensor([ 0, 6, 8, ..., 2499992, 2499997, 2500000]), - col_indices=tensor([ 13104, 56490, 58201, ..., 30329, 136735, - 267614]), - values=tensor([0.2415, 0.0022, 0.5702, ..., 0.5534, 0.4567, 0.6374]), + col_indices=tensor([ 7138, 74289, 101345, ..., 58125, 215534, + 230533]), + values=tensor([0.6785, 0.9079, 0.1725, ..., 0.1754, 0.6680, 0.6302]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5993, 0.2850, 0.9957, ..., 0.8791, 0.8991, 0.2848]) +tensor([0.7654, 0.8855, 0.1287, ..., 0.1047, 0.9719, 0.8120]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +35,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 98.37349367141724 seconds +Time: 101.81685495376587 seconds -[20.36, 20.36, 20.48, 20.36, 20.56, 20.68, 20.6, 20.64, 20.6, 20.52] -[20.56, 20.56, 20.56, 21.88, 23.2, 25.36, 26.72, 27.0, 26.4, 25.72, 25.04, 25.04, 25.2, 25.32, 25.2, 25.0, 25.12, 24.96, 24.88, 25.0, 24.92, 24.92, 24.96, 25.08, 25.24, 25.28, 25.48, 25.28, 25.28, 25.44, 25.44, 25.4, 25.2, 25.12, 24.96, 25.12, 25.32, 25.52, 25.8, 25.72, 25.44, 25.08, 25.0, 25.0, 24.96, 25.0, 25.04, 25.12, 25.12, 25.2, 25.2, 25.16, 25.04, 24.88, 24.96, 25.16, 25.16, 25.24, 25.24, 25.4, 25.2, 25.32, 25.16, 25.16, 25.2, 25.2, 25.0, 25.16, 25.28, 25.28, 25.28, 25.16, 25.2, 25.2, 25.04, 25.2, 25.36, 25.32, 25.32, 25.52, 25.44, 25.4, 25.36, 25.4, 25.32, 25.24, 25.04, 25.04, 24.92, 24.96, 24.96, 25.12, 25.32, 25.24, 25.2, 25.04, 24.92, 25.08, 25.0, 24.96, 24.88] -105.97613334655762 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 98.37349367141724, 'TIME_S_1KI': 98.37349367141724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2530.5661976623533, 'W': 23.87864246176097} -[20.36, 20.36, 20.48, 20.36, 20.56, 20.68, 20.6, 20.64, 20.6, 20.52, 20.68, 20.4, 20.36, 20.52, 20.4, 20.6, 20.68, 20.68, 20.64, 20.76] -369.72 -18.486 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 98.37349367141724, 'TIME_S_1KI': 98.37349367141724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2530.5661976623533, 'W': 23.87864246176097, 'J_1KI': 2530.5661976623533, 'W_1KI': 23.87864246176097, 'W_D': 5.392642461760968, 'J_D': 571.491396617889, 'W_D_1KI': 5.392642461760968, 'J_D_1KI': 5.392642461760968} +[20.36, 20.36, 20.32, 20.32, 20.32, 20.56, 20.64, 20.8, 21.0, 21.0] +[20.96, 20.8, 21.96, 21.96, 22.8, 24.92, 25.88, 26.52, 26.32, 25.68, 25.2, 25.44, 25.36, 25.12, 25.04, 25.08, 25.04, 25.24, 25.36, 25.12, 25.28, 25.28, 25.08, 24.96, 25.04, 25.04, 25.2, 25.32, 25.2, 25.36, 25.08, 24.88, 24.96, 25.12, 25.24, 25.4, 25.4, 25.4, 25.2, 25.16, 25.08, 25.04, 25.24, 25.28, 25.36, 25.48, 25.48, 25.4, 25.36, 25.48, 25.52, 25.4, 25.28, 25.04, 24.84, 24.88, 25.12, 25.68, 26.12, 26.12, 25.76, 25.32, 24.96, 24.84, 24.92, 24.96, 24.92, 24.92, 24.92, 25.28, 25.08, 25.08, 25.16, 25.12, 25.04, 25.12, 25.2, 24.92, 24.92, 24.96, 25.08, 25.4, 25.4, 25.48, 25.44, 25.24, 25.28, 25.48, 25.72, 25.56, 25.56, 25.52, 25.44, 25.2, 25.08, 25.12, 25.08, 25.28, 25.44, 25.32] +104.92622089385986 +{'CPU': 
'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 101.81685495376587, 'TIME_S_1KI': 101.81685495376587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2509.431913032532, 'W': 23.91615643501538} +[20.36, 20.36, 20.32, 20.32, 20.32, 20.56, 20.64, 20.8, 21.0, 21.0, 20.52, 20.48, 20.28, 20.48, 20.52, 20.64, 20.52, 20.44, 20.4, 20.4] +369.22 +18.461000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 101.81685495376587, 'TIME_S_1KI': 101.81685495376587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2509.431913032532, 'W': 23.91615643501538, 'J_1KI': 2509.431913032532, 'W_1KI': 23.91615643501538, 'W_D': 5.455156435015379, 'J_D': 572.3889491109848, 'W_D_1KI': 5.455156435015379, 'J_D_1KI': 5.455156435015379} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json index 05b637f..99da0bf 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1803, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.916261672973633, "TIME_S_1KI": 6.054498986674227, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 348.36130602836613, "W": 23.871230542861202, "J_1KI": 193.21203883991467, "W_1KI": 13.239728531814311, "W_D": 5.286230542861201, "J_D": 77.14383104681971, "W_D_1KI": 2.931908232313478, "J_D_1KI": 1.6261276940174587} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1750, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.16480827331543, "TIME_S_1KI": 5.80846187046596, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 350.615839214325, "W": 23.96705841957935, "J_1KI": 200.35190812247143, "W_1KI": 13.695461954045342, "W_D": 5.563058419579352, "J_D": 81.38238586616524, "W_D_1KI": 3.178890525473916, "J_D_1KI": 1.8165088716993805} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output index a1e0ae8..38bf734 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.821210145950317} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, 
"MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.999283075332642} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 249996, 249998, +tensor(crow_indices=tensor([ 0, 5, 11, ..., 249990, 249995, 250000]), - col_indices=tensor([12413, 12946, 15415, ..., 25881, 14227, 42249]), - values=tensor([0.3226, 0.4714, 0.3498, ..., 0.9478, 0.5271, 0.1593]), + col_indices=tensor([13962, 18394, 22949, ..., 14595, 37415, 49220]), + values=tensor([0.3721, 0.9393, 0.0895, ..., 0.9714, 0.3434, 0.8212]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8728, 0.8759, 0.3915, ..., 0.5486, 0.7678, 0.2723]) +tensor([0.7511, 0.6955, 0.0801, ..., 0.5808, 0.0034, 0.8132]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 5.821210145950317 seconds +Time: 5.999283075332642 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1803 -ss 50000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.916261672973633} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1750 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.16480827331543} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 249988, 249991, +tensor(crow_indices=tensor([ 0, 8, 10, ..., 249989, 249996, 250000]), - col_indices=tensor([ 7415, 12339, 19287, ..., 32647, 33814, 45500]), - values=tensor([0.8370, 0.0969, 0.8316, ..., 0.1944, 0.4025, 0.6344]), + col_indices=tensor([ 581, 19518, 20111, ..., 13396, 34309, 44743]), + values=tensor([0.2810, 0.4140, 0.9885, ..., 0.7044, 0.0704, 0.4209]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2154, 0.6825, 0.0342, ..., 0.6227, 0.4225, 0.9397]) +tensor([0.2162, 0.8403, 0.5346, ..., 0.6143, 0.9627, 0.5199]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.916261672973633 seconds +Time: 10.16480827331543 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 249988, 249991, +tensor(crow_indices=tensor([ 0, 8, 10, ..., 249989, 249996, 250000]), - col_indices=tensor([ 7415, 12339, 19287, ..., 32647, 33814, 45500]), - values=tensor([0.8370, 0.0969, 0.8316, ..., 0.1944, 0.4025, 0.6344]), + col_indices=tensor([ 581, 19518, 20111, ..., 13396, 34309, 44743]), + values=tensor([0.2810, 0.4140, 0.9885, ..., 0.7044, 0.0704, 0.4209]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2154, 0.6825, 0.0342, ..., 0.6227, 0.4225, 0.9397]) +tensor([0.2162, 0.8403, 0.5346, ..., 0.6143, 0.9627, 0.5199]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.916261672973633 seconds +Time: 10.16480827331543 seconds -[20.84, 20.76, 20.76, 20.6, 20.4, 20.28, 20.24, 20.52, 20.56, 20.44] -[20.44, 20.32, 23.6, 25.4, 27.36, 28.36, 28.36, 29.32, 26.76, 26.0, 25.28, 24.92, 25.04, 25.08] -14.593353509902954 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1803, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.916261672973633, 'TIME_S_1KI': 6.054498986674227, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.36130602836613, 'W': 23.871230542861202} -[20.84, 20.76, 20.76, 20.6, 20.4, 20.28, 20.24, 20.52, 20.56, 20.44, 20.2, 20.24, 20.32, 20.44, 20.72, 20.92, 20.92, 21.36, 21.28, 21.28] -371.7 -18.585 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1803, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.916261672973633, 'TIME_S_1KI': 6.054498986674227, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.36130602836613, 'W': 23.871230542861202, 'J_1KI': 193.21203883991467, 'W_1KI': 13.239728531814311, 'W_D': 5.286230542861201, 'J_D': 77.14383104681971, 'W_D_1KI': 2.931908232313478, 'J_D_1KI': 1.6261276940174587} +[20.36, 20.36, 20.36, 20.4, 20.24, 20.28, 20.32, 20.8, 21.08, 20.92] +[21.16, 21.04, 24.12, 26.48, 28.28, 28.28, 29.04, 29.84, 25.88, 24.76, 24.76, 24.72, 24.88, 24.76] +14.629072666168213 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1750, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.16480827331543, 'TIME_S_1KI': 5.80846187046596, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 350.615839214325, 'W': 23.96705841957935} +[20.36, 20.36, 20.36, 20.4, 20.24, 20.28, 20.32, 20.8, 21.08, 20.92, 20.4, 20.24, 20.24, 20.08, 20.2, 20.32, 20.56, 20.68, 20.72, 20.72] +368.0799999999999 +18.403999999999996 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1750, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.16480827331543, 'TIME_S_1KI': 5.80846187046596, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 350.615839214325, 'W': 
23.96705841957935, 'J_1KI': 200.35190812247143, 'W_1KI': 13.695461954045342, 'W_D': 5.563058419579352, 'J_D': 81.38238586616524, 'W_D_1KI': 3.178890525473916, 'J_D_1KI': 1.8165088716993805} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json index 62a0664..295c278 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 53.93572449684143, "TIME_S_1KI": 53.93572449684143, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1405.257185211182, "W": 23.49413775375655, "J_1KI": 1405.257185211182, "W_1KI": 23.49413775375655, "W_D": 4.945137753756551, "J_D": 295.78401357650813, "W_D_1KI": 4.945137753756551, "J_D_1KI": 4.945137753756551} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 54.368531465530396, "TIME_S_1KI": 54.368531465530396, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1384.197800512314, "W": 23.562014123499935, "J_1KI": 1384.197800512314, "W_1KI": 23.562014123499935, "W_D": 4.987014123499936, "J_D": 292.97215190053004, "W_D_1KI": 4.987014123499936, "J_D_1KI": 4.987014123499936} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output index cbf5bdc..ea77cdc 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 53.93572449684143} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 54.368531465530396} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 63, 121, ..., 2499897, - 2499959, 2500000]), - col_indices=tensor([ 158, 1232, 2736, ..., 48449, 48581, 49575]), - values=tensor([0.0263, 0.9327, 0.9651, ..., 0.1558, 0.2228, 0.0301]), +tensor(crow_indices=tensor([ 0, 55, 110, ..., 2499903, + 2499953, 2500000]), + col_indices=tensor([ 180, 933, 1739, ..., 48224, 48432, 48665]), + values=tensor([0.2331, 0.6137, 0.9488, ..., 0.3126, 0.9414, 0.7411]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0174, 0.1708, 0.2801, ..., 0.8892, 0.6468, 0.1800]) +tensor([0.7249, 0.6013, 0.3531, ..., 0.7563, 0.1447, 0.0341]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,16 +16,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 53.93572449684143 seconds +Time: 54.368531465530396 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 63, 121, ..., 2499897, - 2499959, 2500000]), - col_indices=tensor([ 158, 1232, 2736, ..., 48449, 48581, 49575]), - values=tensor([0.0263, 0.9327, 0.9651, ..., 0.1558, 0.2228, 0.0301]), +tensor(crow_indices=tensor([ 0, 55, 110, ..., 2499903, + 2499953, 2500000]), + col_indices=tensor([ 180, 933, 1739, ..., 48224, 48432, 48665]), + values=tensor([0.2331, 0.6137, 0.9488, ..., 0.3126, 0.9414, 0.7411]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0174, 0.1708, 0.2801, ..., 0.8892, 0.6468, 0.1800]) +tensor([0.7249, 0.6013, 0.3531, ..., 0.7563, 0.1447, 0.0341]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -33,13 +33,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 53.93572449684143 seconds +Time: 54.368531465530396 seconds -[20.72, 20.76, 20.76, 20.8, 20.72, 20.72, 20.24, 20.36, 20.72, 20.72] -[20.76, 21.16, 21.32, 25.56, 26.68, 28.72, 29.48, 27.44, 25.8, 24.96, 24.56, 24.52, 24.56, 24.64, 24.6, 24.52, 24.56, 24.56, 24.6, 24.6, 24.68, 24.68, 24.4, 24.48, 24.4, 24.56, 24.52, 24.8, 24.64, 24.8, 24.68, 24.88, 24.56, 24.56, 24.36, 24.48, 24.64, 24.68, 24.56, 24.72, 24.48, 24.48, 24.44, 24.92, 25.08, 24.92, 24.96, 24.76, 24.76, 24.56, 24.44, 24.36, 24.36, 24.44, 24.36, 24.44, 24.52] -59.8130989074707 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 53.93572449684143, 'TIME_S_1KI': 53.93572449684143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1405.257185211182, 'W': 23.49413775375655} -[20.72, 20.76, 20.76, 20.8, 20.72, 20.72, 20.24, 20.36, 20.72, 20.72, 20.2, 20.6, 20.56, 20.68, 20.8, 20.8, 20.64, 20.56, 20.36, 20.16] -370.97999999999996 -18.549 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 53.93572449684143, 'TIME_S_1KI': 53.93572449684143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1405.257185211182, 
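The bracketed lists and bare numbers that close each .output file are the power bookkeeping: a list of idle wattage samples, a list of samples taken during the measured run, the sampling window in seconds, and then the JSON record extended with derived fields; the 20-sample list apparently covers BASELINE_DELAY_S plus BASELINE_TIME_S at one sample per second. The arithmetic is recoverable from the logs (for the 50000/0.0001 run above: idle W = 368.08 / 20 = 18.404, W_D = 23.967 - 18.404 = 5.563, J_D = 5.563 x 14.629 = 81.38, and each *_1KI field is the base field per 1000 iterations). A sketch of that post-processing, with the function shape and field grouping assumed, not lifted from batch.py:

def derive_fields(rec, idle_samples, window_s):
    # rec already holds J (energy over the window), TIME_S and ITERATIONS;
    # every line below reproduces logged values, e.g. W = 350.616 / 14.629.
    rec["W"] = rec["J"] / window_s                   # mean power during run
    idle_w = sum(idle_samples) / len(idle_samples)   # 368.08 / 20 = 18.404
    rec["W_D"] = rec["W"] - idle_w                   # above-idle power
    rec["J_D"] = rec["W_D"] * window_s               # above-idle energy
    per_1ki = 1000 / rec["ITERATIONS"]               # normalise per 1000 iters
    for k in ("TIME_S", "J", "W", "W_D"):
        rec[k + "_1KI"] = rec[k] * per_1ki
    rec["J_D_1KI"] = rec["W_D_1KI"] * per_1ki        # reproduces the logged
    return rec                                       # values (J_D itself unused)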
'W': 23.49413775375655, 'J_1KI': 1405.257185211182, 'W_1KI': 23.49413775375655, 'W_D': 4.945137753756551, 'J_D': 295.78401357650813, 'W_D_1KI': 4.945137753756551, 'J_D_1KI': 4.945137753756551} +[20.28, 20.28, 20.28, 20.52, 20.52, 20.52, 20.84, 20.84, 20.8, 20.84] +[20.8, 20.8, 24.12, 26.04, 26.04, 28.44, 29.44, 30.08, 26.76, 25.32, 24.56, 24.68, 24.84, 24.84, 24.76, 24.52, 24.68, 24.72, 24.96, 24.76, 24.96, 24.84, 25.0, 24.88, 24.88, 24.64, 24.64, 24.44, 24.28, 24.24, 24.28, 24.28, 24.52, 24.72, 24.84, 24.64, 24.68, 24.68, 24.72, 24.72, 24.8, 24.76, 24.56, 24.52, 24.2, 24.24, 24.24, 24.24, 24.24, 24.4, 24.52, 24.52, 24.4, 24.44, 24.4, 24.12] +58.74700665473938 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 54.368531465530396, 'TIME_S_1KI': 54.368531465530396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1384.197800512314, 'W': 23.562014123499935} +[20.28, 20.28, 20.28, 20.52, 20.52, 20.52, 20.84, 20.84, 20.8, 20.84, 20.4, 20.36, 20.4, 20.28, 20.56, 20.56, 20.96, 21.12, 21.28, 21.24] +371.5 +18.575 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 54.368531465530396, 'TIME_S_1KI': 54.368531465530396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1384.197800512314, 'W': 23.562014123499935, 'J_1KI': 1384.197800512314, 'W_1KI': 23.562014123499935, 'W_D': 4.987014123499936, 'J_D': 292.97215190053004, 'W_D_1KI': 4.987014123499936, 'J_D_1KI': 4.987014123499936} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json index 8adab40..79e75a2 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10285, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.171167612075806, "TIME_S_1KI": 0.9889321936874872, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.4937496185303, "W": 23.99041156544644, "J_1KI": 31.647423395092883, "W_1KI": 2.332563107967568, "W_D": 5.591411565446439, "J_D": 75.86237156176564, "W_D_1KI": 0.5436472110302809, "J_D_1KI": 0.052858260673824105} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10740, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.677687883377075, "TIME_S_1KI": 0.9941981269438619, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 332.0521117687225, "W": 22.70459223140718, "J_1KI": 30.91732884252537, "W_1KI": 2.11402162303605, "W_D": 4.278592231407178, "J_D": 62.57393091917032, "W_D_1KI": 0.3983791649354914, "J_D_1KI": 0.03709303211689864} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output index 12e8b6c..0bf1f45 100644 --- 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.020900011062622} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.031747817993164} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 24999, 25000]), - col_indices=tensor([43592, 45763, 41730, ..., 2923, 32227, 39553]), - values=tensor([0.0398, 0.4210, 0.0283, ..., 0.1409, 0.8695, 0.8837]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([40215, 7884, 10043, ..., 30495, 28697, 40914]), + values=tensor([0.0776, 0.0144, 0.1627, ..., 0.8046, 0.8736, 0.3953]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8554, 0.8486, 0.8747, ..., 0.5244, 0.7497, 0.0831]) +tensor([0.9279, 0.0068, 0.0286, ..., 0.3265, 0.6131, 0.7632]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 1.020900011062622 seconds +Time: 1.031747817993164 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10285 -ss 50000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.171167612075806} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10176 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.948124408721924} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([14664, 43703, 46520, ..., 7061, 31497, 43987]), - values=tensor([0.1911, 0.5487, 0.9416, ..., 0.5242, 0.5616, 0.0900]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 25000, 25000]), + col_indices=tensor([36670, 6571, 29568, ..., 18627, 41427, 17079]), + values=tensor([0.2785, 0.5861, 0.6450, ..., 0.6094, 0.8660, 0.4536]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7733, 0.9528, 0.6124, ..., 0.0354, 0.2670, 0.0752]) +tensor([0.5003, 0.3455, 0.7125, ..., 0.5405, 0.2393, 0.4201]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,15 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.171167612075806 seconds +Time: 9.948124408721924 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10740 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.677687883377075} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([14664, 43703, 46520, ..., 7061, 31497, 43987]), - values=tensor([0.1911, 0.5487, 0.9416, ..., 0.5242, 0.5616, 0.0900]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24998, 25000, 25000]), + col_indices=tensor([16841, 18429, 37212, ..., 30943, 4364, 38003]), + values=tensor([0.6614, 0.5763, 0.4032, ..., 0.0279, 0.2406, 0.7956]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7733, 0.9528, 0.6124, ..., 0.0354, 0.2670, 0.0752]) +tensor([0.8404, 0.4278, 0.6904, ..., 0.0651, 0.6749, 0.6556]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -50,13 +53,29 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.171167612075806 seconds +Time: 10.677687883377075 seconds -[20.44, 20.44, 20.36, 20.08, 20.36, 20.36, 20.4, 20.44, 20.6, 20.52] -[20.52, 20.72, 24.24, 25.88, 27.44, 27.44, 28.48, 29.32, 25.96, 26.28, 25.8, 26.0, 25.92] -13.567660093307495 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10285, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.171167612075806, 'TIME_S_1KI': 0.9889321936874872, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.4937496185303, 'W': 23.99041156544644} -[20.44, 20.44, 20.36, 20.08, 20.36, 20.36, 20.4, 20.44, 20.6, 20.52, 20.56, 20.36, 20.28, 20.12, 20.2, 20.6, 20.92, 20.8, 20.6, 20.6] -367.98 -18.399 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10285, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.171167612075806, 'TIME_S_1KI': 0.9889321936874872, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.4937496185303, 'W': 23.99041156544644, 'J_1KI': 31.647423395092883, 'W_1KI': 2.332563107967568, 'W_D': 5.591411565446439, 'J_D': 75.86237156176564, 'W_D_1KI': 0.5436472110302809, 'J_D_1KI': 0.052858260673824105} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24998, 25000, 25000]), + col_indices=tensor([16841, 18429, 37212, ..., 30943, 4364, 38003]), + values=tensor([0.6614, 0.5763, 0.4032, ..., 0.0279, 0.2406, 0.7956]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8404, 0.4278, 0.6904, ..., 0.0651, 0.6749, 0.6556]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.677687883377075 seconds + +[20.72, 20.68, 20.64, 20.72, 20.72, 20.64, 20.68, 20.52, 20.52, 20.6] +[21.04, 21.2, 21.2, 21.92, 23.48, 24.12, 25.48, 25.92, 26.2, 25.8, 25.88, 25.6, 25.8, 25.72] +14.624887704849243 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10740, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.677687883377075, 'TIME_S_1KI': 0.9941981269438619, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.0521117687225, 'W': 22.70459223140718} +[20.72, 20.68, 20.64, 20.72, 20.72, 20.64, 20.68, 20.52, 20.52, 20.6, 20.2, 20.12, 20.24, 20.24, 20.28, 20.28, 20.4, 20.4, 20.44, 20.48] +368.52000000000004 +18.426000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10740, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.677687883377075, 'TIME_S_1KI': 0.9941981269438619, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.0521117687225, 'W': 22.70459223140718, 'J_1KI': 30.91732884252537, 'W_1KI': 2.11402162303605, 'W_D': 4.278592231407178, 'J_D': 62.57393091917032, 'W_D_1KI': 0.3983791649354914, 'J_D_1KI': 0.03709303211689864} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..f8f615f --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 96826, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.588202953338623, "TIME_S_1KI": 0.10935289027057425, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 358.2542047309875, "W": 24.50573052785985, "J_1KI": 3.699979393251683, "W_1KI": 0.2530903943967514, "W_D": 4.6827305278598494, "J_D": 68.45777967405314, "W_D_1KI": 0.04836232548963966, "J_D_1KI": 0.000499476643563089} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.output 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..4f006ee --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.1145937442779541} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 2495, 2499, 2500]), + col_indices=tensor([2458, 4485, 3264, ..., 1767, 2577, 3633]), + values=tensor([0.5111, 0.1865, 0.4486, ..., 0.9187, 0.4905, 0.6857]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.1036, 0.8585, 0.3762, ..., 0.6219, 0.4226, 0.3195]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.1145937442779541 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 91628 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.936307668685913} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([1700, 3040, 4129, ..., 4083, 2058, 3930]), + values=tensor([0.1350, 0.7186, 0.2594, ..., 0.2124, 0.0344, 0.1244]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7260, 0.9332, 0.2146, ..., 0.1697, 0.4017, 0.1867]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 9.936307668685913 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 96826 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.588202953338623} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2500, 2500]), + col_indices=tensor([3997, 2967, 4931, ..., 3835, 1314, 3597]), + values=tensor([0.7356, 0.2235, 0.2006, ..., 0.5232, 0.0695, 0.2889]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7175, 0.9581, 0.9907, ..., 0.9548, 0.8349, 0.6616]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.588202953338623 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2500, 2500]), + col_indices=tensor([3997, 2967, 4931, ..., 3835, 1314, 3597]), + values=tensor([0.7356, 0.2235, 0.2006, ..., 0.5232, 0.0695, 0.2889]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7175, 0.9581, 0.9907, ..., 0.9548, 0.8349, 0.6616]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.588202953338623 seconds + +[20.4, 20.56, 20.56, 20.6, 20.44, 20.64, 20.68, 20.96, 21.52, 22.56] +[23.28, 23.64, 26.88, 28.2, 29.6, 29.52, 29.52, 29.72, 25.72, 24.92, 23.8, 23.72, 24.2, 24.28] +14.619201183319092 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 96826, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.588202953338623, 'TIME_S_1KI': 0.10935289027057425, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 358.2542047309875, 'W': 24.50573052785985} +[20.4, 20.56, 20.56, 20.6, 20.44, 20.64, 20.68, 20.96, 21.52, 22.56, 22.96, 23.28, 23.32, 23.36, 23.32, 23.28, 23.36, 23.12, 23.0, 23.0] +396.46 +19.823 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 96826, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.588202953338623, 'TIME_S_1KI': 0.10935289027057425, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 358.2542047309875, 'W': 24.50573052785985, 'J_1KI': 3.699979393251683, 'W_1KI': 0.2530903943967514, 'W_D': 4.6827305278598494, 'J_D': 68.45777967405314, 'W_D_1KI': 0.04836232548963966, 'J_D_1KI': 0.000499476643563089} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..7e96f33 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 17363, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, 
"TIME_S": 10.667346239089966, "TIME_S_1KI": 0.6143722996653784, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 333.18905796051024, "W": 22.76067676218484, "J_1KI": 19.1896019098376, "W_1KI": 1.3108723585892323, "W_D": 3.0516767621848366, "J_D": 44.67289422965043, "W_D_1KI": 0.17575745909029755, "J_D_1KI": 0.010122528312520737} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..ba00b65 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,62 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.6047096252441406} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 11, ..., 24983, 24992, 25000]), + col_indices=tensor([ 225, 408, 1943, ..., 2555, 2651, 2712]), + values=tensor([0.7906, 0.4816, 0.2276, ..., 0.2718, 0.8003, 0.8712]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6660, 0.8709, 0.7078, ..., 0.4840, 0.5828, 0.2928]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.6047096252441406 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17363 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.667346239089966} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 24986, 24994, 25000]), + col_indices=tensor([ 85, 195, 4187, ..., 2991, 3287, 4675]), + values=tensor([0.1915, 0.0298, 0.9128, ..., 0.0482, 0.8260, 0.8063]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9668, 0.6018, 0.4153, ..., 0.6117, 0.1974, 0.6733]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.667346239089966 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 24986, 24994, 25000]), + col_indices=tensor([ 85, 195, 4187, ..., 2991, 3287, 4675]), + values=tensor([0.1915, 0.0298, 0.9128, ..., 0.0482, 0.8260, 0.8063]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9668, 0.6018, 0.4153, ..., 0.6117, 0.1974, 0.6733]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.667346239089966 seconds + +[22.96, 22.64, 22.44, 22.84, 23.2, 23.48, 24.08, 23.88, 23.04, 22.2] +[21.52, 21.52, 20.88, 24.32, 25.36, 27.04, 27.68, 28.12, 25.12, 23.88, 23.84, 23.84, 23.68, 23.84] +14.638802766799927 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.667346239089966, 'TIME_S_1KI': 0.6143722996653784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 333.18905796051024, 'W': 22.76067676218484} +[22.96, 22.64, 22.44, 22.84, 23.2, 23.48, 24.08, 23.88, 23.04, 22.2, 20.48, 20.6, 20.52, 20.76, 20.72, 20.68, 20.68, 20.76, 20.64, 20.8] +394.18000000000006 +19.709000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.667346239089966, 'TIME_S_1KI': 0.6143722996653784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 333.18905796051024, 'W': 22.76067676218484, 'J_1KI': 19.1896019098376, 'W_1KI': 1.3108723585892323, 'W_D': 3.0516767621848366, 'J_D': 44.67289422965043, 'W_D_1KI': 0.17575745909029755, 'J_D_1KI': 0.010122528312520737} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..a5d181f --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1948, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.5094473361969, "TIME_S_1KI": 5.394993499074384, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 321.46233737945556, "W": 22.060099154306354, "J_1KI": 165.02173376768766, "W_1KI": 11.324486218843099, "W_D": 3.783099154306356, "J_D": 55.127762036561975, "W_D_1KI": 1.9420426870155834, "J_D_1KI": 0.9969418311168293} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..80c2fbe --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.389868259429932} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 46, 96, ..., 249907, 249949, + 250000]), + col_indices=tensor([ 123, 345, 399, ..., 4711, 4879, 4988]), + values=tensor([0.4250, 0.5468, 0.7620, ..., 0.1883, 0.2040, 0.8985]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.2612, 0.9268, 0.9416, ..., 0.0698, 0.1077, 0.5090]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 5.389868259429932 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1948 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.5094473361969} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 115, ..., 249893, 249944, + 250000]), + col_indices=tensor([ 73, 135, 475, ..., 4575, 4723, 4971]), + values=tensor([0.1739, 0.5180, 0.0955, ..., 0.3924, 0.5566, 0.2573]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5075, 0.6044, 0.6141, ..., 0.4161, 0.9554, 0.0515]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.5094473361969 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 115, ..., 249893, 249944, + 250000]), + col_indices=tensor([ 73, 135, 475, ..., 4575, 4723, 4971]), + values=tensor([0.1739, 0.5180, 0.0955, ..., 0.3924, 0.5566, 0.2573]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5075, 0.6044, 0.6141, ..., 0.4161, 0.9554, 0.0515]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.5094473361969 seconds + +[20.56, 20.36, 20.36, 20.92, 20.64, 20.52, 20.44, 20.2, 20.0, 20.24] +[20.4, 20.4, 20.96, 22.24, 24.0, 24.56, 25.32, 25.32, 25.28, 24.8, 24.08, 24.24, 24.16, 24.16] +14.572116613388062 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1948, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.5094473361969, 'TIME_S_1KI': 5.394993499074384, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 321.46233737945556, 'W': 22.060099154306354} +[20.56, 20.36, 20.36, 20.92, 20.64, 20.52, 20.44, 20.2, 20.0, 20.24, 20.16, 20.08, 20.04, 20.12, 20.12, 20.16, 20.24, 20.2, 20.44, 20.44] +365.53999999999996 +18.276999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1948, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.5094473361969, 'TIME_S_1KI': 5.394993499074384, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 321.46233737945556, 'W': 22.060099154306354, 'J_1KI': 165.02173376768766, 'W_1KI': 11.324486218843099, 'W_D': 3.783099154306356, 'J_D': 55.127762036561975, 'W_D_1KI': 1.9420426870155834, 'J_D_1KI': 0.9969418311168293} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..496e602 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 28.093817949295044, "TIME_S_1KI": 28.093817949295044, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 689.2963391876219, "W": 22.82953703239495, "J_1KI": 689.2963391876219, "W_1KI": 22.82953703239495, "W_D": 4.334537032394952, "J_D": 130.87346030116072, "W_D_1KI": 4.334537032394952, "J_D_1KI": 4.334537032394952} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..9c2fd18 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 28.093817949295044} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 249, 484, ..., 1249498, + 1249755, 1250000]), + col_indices=tensor([ 8, 31, 46, ..., 4934, 4976, 4984]), + values=tensor([0.2044, 0.4643, 0.3912, ..., 0.8352, 0.2191, 0.4950]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7092, 0.7480, 0.2063, ..., 0.9775, 0.7055, 0.9981]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 28.093817949295044 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 249, 484, ..., 1249498, + 1249755, 1250000]), + col_indices=tensor([ 8, 31, 46, ..., 4934, 4976, 4984]), + values=tensor([0.2044, 0.4643, 0.3912, ..., 0.8352, 0.2191, 0.4950]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7092, 0.7480, 0.2063, ..., 0.9775, 0.7055, 0.9981]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 28.093817949295044 seconds + +[20.56, 20.6, 20.48, 20.56, 20.4, 20.48, 20.68, 20.72, 20.92, 21.08] +[20.92, 20.92, 20.56, 23.36, 25.52, 27.28, 28.24, 28.72, 25.44, 24.36, 24.6, 24.76, 24.64, 24.52, 24.36, 24.44, 24.24, 24.4, 24.44, 24.16, 24.16, 24.12, 24.2, 24.2, 24.08, 24.12, 24.24, 24.12, 24.0] +30.193180799484253 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 28.093817949295044, 'TIME_S_1KI': 28.093817949295044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 689.2963391876219, 'W': 22.82953703239495} +[20.56, 20.6, 20.48, 20.56, 20.4, 20.48, 20.68, 20.72, 20.92, 21.08, 20.52, 20.6, 20.36, 20.4, 20.52, 20.44, 20.6, 20.44, 20.44, 20.36] +369.9 +18.494999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 28.093817949295044, 'TIME_S_1KI': 28.093817949295044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 689.2963391876219, 'W': 22.82953703239495, 'J_1KI': 689.2963391876219, 'W_1KI': 22.82953703239495, 'W_D': 4.334537032394952, 'J_D': 130.87346030116072, 'W_D_1KI': 4.334537032394952, 'J_D_1KI': 4.334537032394952} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..fff9557 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 53.78093886375427, "TIME_S_1KI": 53.78093886375427, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1352.5172874259947, "W": 23.03061139017129, "J_1KI": 1352.5172874259947, "W_1KI": 23.03061139017129, "W_D": 4.417611390171292, "J_D": 259.4327902595995, "W_D_1KI": 4.417611390171292, "J_D_1KI": 4.417611390171292} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..c5ce197 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 53.78093886375427} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 534, 1044, ..., 2498992, + 2499517, 2500000]), + col_indices=tensor([ 3, 19, 25, ..., 4971, 4983, 4990]), + values=tensor([0.2124, 0.6762, 0.6770, ..., 0.5380, 0.6783, 0.2658]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0160, 0.9125, 0.7128, ..., 0.4183, 0.3158, 0.5797]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 53.78093886375427 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 534, 1044, ..., 2498992, + 2499517, 2500000]), + col_indices=tensor([ 3, 19, 25, ..., 4971, 4983, 4990]), + values=tensor([0.2124, 0.6762, 0.6770, ..., 0.5380, 0.6783, 0.2658]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0160, 0.9125, 0.7128, ..., 0.4183, 0.3158, 0.5797]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 53.78093886375427 seconds + +[20.56, 20.72, 20.76, 20.76, 20.64, 20.88, 20.6, 20.52, 20.72, 20.48] +[20.36, 20.24, 20.8, 22.0, 24.0, 25.2, 26.08, 25.8, 25.8, 25.44, 24.16, 24.32, 24.32, 24.4, 24.36, 24.4, 24.52, 24.88, 24.68, 24.64, 24.52, 24.28, 24.04, 24.08, 24.08, 24.08, 24.36, 24.4, 24.48, 24.6, 24.6, 24.64, 24.56, 24.52, 24.56, 24.32, 24.04, 24.32, 24.36, 24.24, 24.28, 24.28, 24.28, 24.48, 24.52, 24.76, 24.56, 24.24, 24.16, 24.04, 24.12, 24.12, 24.44, 24.48, 24.52, 24.4] +58.726938009262085 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 53.78093886375427, 'TIME_S_1KI': 53.78093886375427, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1352.5172874259947, 'W': 23.03061139017129} +[20.56, 20.72, 20.76, 20.76, 20.64, 20.88, 20.6, 20.52, 20.72, 20.48, 20.84, 20.6, 20.72, 20.72, 20.72, 20.52, 20.6, 20.72, 20.76, 20.72] +372.26 +18.613 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 53.78093886375427, 'TIME_S_1KI': 53.78093886375427, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1352.5172874259947, 'W': 23.03061139017129, 'J_1KI': 1352.5172874259947, 'W_1KI': 23.03061139017129, 'W_D': 4.417611390171292, 'J_D': 259.4327902595995, 'W_D_1KI': 4.417611390171292, 'J_D_1KI': 4.417611390171292} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..a523326 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 289284, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.336281061172485, "TIME_S_1KI": 0.035730566022222056, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 302.8508556365967, "W": 22.269333190276484, "J_1KI": 1.0468980504853247, "W_1KI": 0.07698086721103305, "W_D": 3.542333190276487, "J_D": 48.17381052494056, "W_D_1KI": 0.012245174950140648, "J_D_1KI": 4.232925066765064e-05} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..2a5bc74 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,356 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 1e-05 -c 1'] 
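Across the synthetic records the matrix fields follow directly from the (size, density) pair: MATRIX_SHAPE = [size, size], MATRIX_SIZE = size**2, and MATRIX_NNZ = size**2 * density (here 5000**2 * 1e-05 = 250). One plausible generator consistent with those fields; spmv.py's actual construction is not shown in this diff, and with random index draws duplicates can coalesce to slightly fewer than nnz entries:

import torch

def synthetic_csr(size, density):
    # Hypothetical reconstruction, not spmv.py's real routine.
    nnz = int(size * size * density)            # e.g. 5000**2 * 1e-05 = 250
    idx = torch.randint(0, size, (2, nnz))      # random (row, col) pairs
    vals = torch.rand(nnz)                      # uniform [0,1) values, as dumped
    coo = torch.sparse_coo_tensor(idx, vals, (size, size)).coalesce()
    return coo.to_sparse_csr().type(torch.float32)  # mirrors spmv.py:75 above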
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04395866394042969} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 439, 4351, 241, 500, 1004, 350, 1803, 3065, 3191, + 3136, 4095, 405, 1027, 870, 1417, 1360, 1534, 1342, + 3091, 3442, 2499, 1358, 1636, 3780, 2825, 2256, 4221, + 891, 2908, 3121, 1626, 2038, 883, 1037, 495, 2079, + 274, 937, 1868, 1488, 2903, 1523, 2167, 269, 3946, + 4053, 3008, 3702, 2193, 1563, 433, 1763, 2812, 3707, + 1886, 3013, 1511, 241, 1937, 2889, 1518, 4490, 4205, + 2026, 1673, 448, 986, 4061, 3094, 3985, 2097, 1213, + 4129, 3540, 2913, 1842, 3281, 3579, 2699, 1582, 1926, + 2137, 2888, 530, 3516, 2878, 57, 3238, 1656, 156, + 3904, 1121, 616, 2128, 426, 4846, 2365, 4030, 347, + 3690, 867, 1324, 1005, 4649, 3492, 4358, 47, 220, + 4307, 708, 2842, 3336, 1686, 1004, 4195, 3767, 332, + 43, 2809, 3452, 1463, 2172, 1464, 3770, 1880, 2042, + 3777, 2498, 3420, 1112, 4060, 4103, 4825, 1440, 4448, + 99, 2245, 3060, 27, 3733, 457, 3987, 3747, 1652, + 2522, 757, 4125, 2250, 2724, 3925, 2338, 3816, 1409, + 2282, 4242, 682, 3683, 4310, 1582, 4330, 601, 544, + 2289, 2874, 3966, 1136, 681, 4257, 2516, 3237, 2677, + 2257, 2771, 3675, 3168, 1248, 4288, 3632, 3599, 280, + 4551, 4047, 3577, 2662, 2281, 1968, 3402, 454, 1141, + 3366, 354, 2291, 4168, 3523, 2296, 2127, 2248, 4229, + 2140, 736, 3393, 640, 4820, 2236, 1416, 4815, 3234, + 1042, 1979, 4637, 2323, 138, 2380, 3226, 1859, 3342, + 2378, 803, 349, 3172, 4960, 4660, 4480, 3337, 245, + 4128, 3649, 2732, 968, 771, 3445, 3899, 644, 16, + 3599, 1029, 1799, 4502, 366, 4843, 2859, 2949, 545, + 645, 3511, 4843, 251, 2988, 2387, 946]), + values=tensor([9.4296e-01, 5.2301e-01, 8.9037e-01, 1.8262e-01, + 9.3621e-01, 5.6553e-01, 9.8721e-01, 6.5141e-01, + 2.8305e-01, 8.9567e-01, 7.1276e-04, 4.5788e-01, + 1.3154e-01, 7.7912e-01, 2.1464e-01, 9.3572e-01, + 4.0199e-01, 1.4579e-01, 1.5259e-01, 5.2311e-01, + 6.3620e-01, 8.3700e-01, 3.7813e-01, 1.4289e-01, + 6.8630e-01, 9.7120e-01, 7.6830e-01, 1.8723e-01, + 5.0392e-01, 9.2014e-01, 9.6103e-01, 7.2487e-01, + 3.2638e-01, 3.9838e-01, 2.7919e-01, 9.9376e-02, + 1.2394e-01, 1.9018e-01, 9.4573e-01, 4.8384e-02, + 3.3755e-01, 5.4543e-01, 6.5933e-01, 9.2931e-03, + 6.7184e-01, 3.3367e-01, 7.2403e-02, 1.6238e-01, + 7.9429e-01, 7.1594e-01, 9.3852e-01, 9.0787e-01, + 8.7587e-01, 2.4929e-01, 3.4089e-01, 7.4583e-01, + 3.6106e-01, 5.5151e-01, 6.3073e-01, 2.4689e-01, + 6.6122e-01, 6.2804e-01, 3.7429e-04, 5.6550e-01, + 5.0592e-01, 5.2248e-02, 7.1885e-01, 1.4852e-03, + 6.1029e-01, 4.5258e-01, 9.8998e-01, 7.7545e-03, + 6.8035e-01, 8.7032e-01, 2.7807e-01, 6.6854e-01, + 8.8838e-01, 1.5830e-02, 6.6226e-01, 1.1911e-01, + 1.8780e-01, 3.7508e-01, 9.2709e-01, 1.3932e-01, + 8.5139e-01, 2.8186e-01, 2.2711e-01, 8.2491e-01, + 9.3666e-01, 5.4799e-01, 8.7126e-01, 5.6305e-01, + 2.9909e-01, 9.8105e-02, 1.0565e-01, 9.1471e-01, + 9.5693e-01, 5.2767e-01, 7.5753e-01, 2.3887e-01, + 8.7389e-01, 2.4255e-01, 8.0756e-01, 7.2201e-01, + 6.6620e-01, 4.9751e-01, 
5.1454e-01, 8.6001e-01, + 3.0834e-01, 2.2246e-01, 1.9841e-01, 8.9698e-02, + 9.1174e-01, 9.2243e-01, 7.7010e-01, 3.5962e-01, + 6.8634e-01, 9.5528e-01, 9.6147e-02, 9.3024e-02, + 8.3726e-01, 7.2003e-01, 6.7904e-01, 2.9273e-01, + 9.7464e-02, 1.5658e-02, 9.0559e-01, 3.6883e-01, + 7.9470e-01, 3.6450e-01, 5.7814e-03, 6.5827e-02, + 6.1557e-02, 3.8228e-02, 4.7705e-01, 2.6058e-01, + 8.0137e-01, 9.8272e-01, 8.4581e-01, 6.6501e-01, + 5.2583e-03, 3.0522e-01, 9.5123e-01, 2.4154e-01, + 6.0106e-01, 6.7170e-01, 2.1086e-01, 6.6402e-01, + 9.0397e-01, 3.9084e-01, 2.0324e-01, 7.2153e-01, + 6.7300e-01, 5.3381e-01, 2.8418e-02, 4.4506e-01, + 1.0782e-01, 1.9622e-01, 8.0898e-02, 5.4146e-01, + 8.2802e-01, 7.5722e-01, 9.2798e-04, 8.7421e-02, + 6.0281e-01, 1.2511e-01, 5.8418e-01, 7.7672e-01, + 8.2524e-01, 8.4603e-01, 6.9503e-01, 5.3184e-01, + 8.1918e-01, 5.6983e-01, 6.0056e-01, 1.8971e-01, + 1.0667e-01, 1.4853e-01, 3.6607e-01, 9.1330e-01, + 7.6093e-01, 6.6336e-01, 8.3088e-02, 8.4756e-01, + 5.8339e-01, 9.7773e-03, 7.7948e-02, 2.5127e-01, + 9.2139e-01, 3.2626e-01, 8.8502e-01, 8.8419e-01, + 9.3048e-01, 2.5403e-01, 7.0568e-01, 6.2669e-01, + 5.4774e-01, 7.1848e-01, 6.1011e-01, 7.7754e-01, + 8.5827e-01, 1.7827e-01, 6.2997e-01, 8.0090e-02, + 2.7963e-01, 9.9685e-01, 9.8342e-01, 1.9697e-01, + 4.5505e-01, 4.5432e-01, 2.5097e-01, 6.7016e-01, + 1.8891e-01, 1.1873e-01, 3.8346e-01, 2.0525e-01, + 7.7441e-01, 9.7489e-01, 9.5720e-01, 1.2362e-01, + 6.3758e-01, 4.1703e-01, 4.2223e-01, 1.8615e-01, + 3.6248e-02, 7.9391e-01, 2.0557e-01, 2.4331e-01, + 3.3957e-02, 7.9866e-01, 9.2672e-01, 7.1739e-01, + 4.0885e-01, 7.5316e-01, 1.3635e-01, 7.8209e-01, + 7.8379e-01, 8.6373e-01, 4.7931e-01, 9.1748e-01, + 8.8234e-01, 3.9897e-02, 1.9663e-01, 5.1895e-01, + 1.8534e-01, 5.8047e-01, 8.8859e-01, 6.9097e-01, + 9.8689e-01, 3.5349e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.0440, 0.7352, 0.2145, ..., 0.3780, 0.1332, 0.0924]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.04395866394042969 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 238860 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.669770956039429} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3091, 2173, 2760, 4828, 4497, 2021, 4336, 1372, 2593, + 4578, 2353, 1617, 2286, 4843, 611, 3842, 780, 3798, + 1703, 2131, 4067, 844, 2093, 4026, 1314, 3497, 4042, + 4776, 3331, 3582, 1805, 810, 679, 3355, 267, 75, + 1213, 2221, 1110, 2198, 2383, 4776, 4217, 678, 3909, + 1512, 3709, 4936, 3783, 908, 1282, 1246, 4599, 2322, + 400, 1819, 1668, 1808, 2129, 438, 3127, 679, 3190, + 1219, 3867, 1347, 947, 2998, 4062, 3110, 2027, 1149, + 4411, 3584, 2329, 3206, 3899, 4697, 2802, 4938, 2228, + 4929, 3505, 2881, 4726, 2353, 1213, 3407, 639, 4955, + 2493, 2366, 1047, 948, 3072, 1625, 3356, 4277, 3654, + 3675, 3687, 1889, 2750, 4011, 2466, 2775, 4133, 2972, + 4848, 1886, 2462, 153, 3593, 4334, 1547, 1439, 1117, + 4652, 364, 4137, 3929, 32, 4355, 3906, 1819, 701, + 843, 1395, 965, 3122, 4564, 113, 2887, 3505, 3813, + 1298, 294, 4050, 112, 970, 3705, 1370, 1914, 3916, + 1662, 4047, 1814, 166, 4992, 2795, 1857, 3493, 862, + 4171, 3693, 4410, 3072, 191, 4249, 2990, 2750, 2777, + 2482, 2558, 4173, 4640, 4365, 2368, 165, 3278, 3602, + 4362, 3309, 800, 3849, 4373, 4033, 1894, 4873, 4868, + 2497, 3754, 682, 4534, 989, 3189, 843, 3829, 1001, + 1817, 1493, 4385, 4304, 2601, 4528, 142, 3070, 914, + 4818, 1532, 2114, 396, 1015, 1256, 3073, 1867, 2500, + 3218, 958, 3683, 1738, 4356, 2003, 3914, 1072, 3035, + 906, 3835, 659, 3510, 266, 3356, 607, 3975, 4538, + 845, 569, 3535, 3958, 1202, 678, 853, 3550, 3828, + 589, 2363, 2962, 3748, 447, 325, 4847, 760, 2711, + 4314, 4639, 1546, 4036, 2172, 2793, 2280]), + values=tensor([0.9311, 0.9337, 0.6031, 0.9384, 0.3149, 0.4635, 0.9582, + 0.9724, 0.9125, 0.1632, 0.9245, 0.0672, 0.1143, 0.3208, + 0.1789, 0.9522, 0.9522, 0.5693, 0.9699, 0.8167, 0.2351, + 0.8218, 0.0084, 0.8188, 0.0090, 0.0238, 0.9758, 0.2522, + 0.5008, 0.7112, 0.5123, 0.0579, 0.8162, 0.9429, 0.9583, + 0.8914, 0.0600, 0.0407, 0.6565, 0.9268, 0.0759, 0.6544, + 0.1768, 0.1190, 0.3416, 0.4319, 0.6553, 0.9105, 0.0139, + 0.3695, 0.9454, 0.5109, 0.7588, 0.3085, 0.7470, 0.2791, + 0.8189, 0.8019, 0.7112, 0.0119, 0.9175, 0.6748, 0.5583, + 0.3843, 0.9066, 0.9602, 0.5163, 0.7903, 0.5317, 0.8558, + 0.0178, 0.9916, 0.0539, 0.1774, 0.1131, 0.2007, 0.4985, + 0.3602, 0.2595, 0.8066, 0.9027, 0.9075, 0.6105, 0.4231, + 0.6445, 0.3321, 0.5032, 0.7416, 0.0328, 0.1698, 0.1582, + 0.0973, 0.7734, 0.4633, 0.0933, 0.5521, 0.4839, 0.4820, + 0.1735, 0.5797, 0.5056, 0.2959, 0.7988, 0.9839, 0.0551, + 0.6884, 0.9314, 0.9873, 0.7685, 0.0058, 0.0787, 0.9765, + 0.6762, 0.3041, 0.3881, 0.9603, 0.5133, 0.5010, 0.5978, + 0.4901, 0.1096, 0.3089, 0.4831, 0.1777, 0.2237, 0.1128, + 0.1933, 0.6434, 0.5434, 0.2104, 0.1106, 0.7119, 0.8262, + 0.1519, 0.4358, 0.3729, 0.3091, 0.7531, 0.7323, 0.9612, + 0.1214, 0.5723, 0.7721, 0.9862, 0.8839, 0.8431, 0.1624, + 0.7651, 0.9221, 0.7966, 0.7730, 0.4034, 0.8456, 0.4576, + 0.9356, 0.8744, 0.0500, 0.0142, 0.8332, 0.7405, 0.9426, + 0.9799, 0.7180, 0.0762, 0.9417, 0.8209, 0.5328, 0.8635, + 0.5987, 0.9841, 0.7140, 0.4626, 0.1625, 0.9366, 0.7462, + 0.7100, 0.7244, 0.1108, 0.3970, 0.3797, 0.5535, 0.5783, + 0.7423, 0.8333, 0.5720, 0.4870, 0.8115, 0.4909, 0.2202, + 0.4712, 0.9250, 0.1538, 0.1309, 0.3084, 0.5786, 0.3477, + 0.3671, 0.5677, 0.9819, 0.9097, 0.6246, 0.6428, 0.8143, + 0.2008, 0.5795, 0.9732, 0.8629, 0.0578, 0.4214, 0.2742, + 0.5882, 0.2057, 0.2782, 0.1474, 0.6538, 0.7641, 0.1314, + 0.5759, 0.5734, 0.1329, 0.3014, 0.6477, 0.7298, 0.9380, + 0.2945, 0.0625, 0.3728, 0.4803, 0.1010, 
0.9830, 0.7456, + 0.0112, 0.3135, 0.2364, 0.8172, 0.4517, 0.9464, 0.8185, + 0.0983, 0.1786, 0.9208, 0.9192, 0.5143, 0.5288, 0.7078, + 0.6070, 0.5609, 0.7211, 0.9777, 0.7339]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.3130, 0.9247, 0.8789, ..., 0.8987, 0.6939, 0.4674]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 8.669770956039429 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 289284 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.336281061172485} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1127, 2914, 651, 3868, 3478, 2616, 630, 2816, 2915, + 3943, 3548, 2263, 4542, 4912, 1799, 1521, 4830, 1959, + 565, 4046, 352, 708, 388, 1948, 1601, 580, 4884, + 1273, 2391, 2767, 3934, 3369, 4073, 1550, 863, 4309, + 666, 1592, 2221, 566, 3749, 4816, 269, 465, 352, + 1056, 3923, 2996, 3908, 4028, 1098, 3401, 2466, 323, + 4554, 3723, 4598, 3095, 2628, 4872, 2114, 3081, 3773, + 3425, 1731, 1262, 1917, 2900, 2481, 4272, 4608, 2685, + 4012, 3949, 546, 721, 2719, 2060, 3934, 2047, 319, + 1177, 4368, 590, 919, 2939, 1268, 3254, 3134, 888, + 658, 3560, 3243, 4771, 1870, 2190, 3032, 1145, 3921, + 3093, 240, 195, 4761, 94, 4383, 2739, 425, 1280, + 2618, 2549, 2332, 4924, 2783, 3566, 338, 1395, 3128, + 1333, 3138, 4314, 4739, 2917, 1017, 709, 300, 2533, + 3360, 999, 395, 2920, 889, 1982, 4806, 1821, 1887, + 3776, 1083, 112, 254, 1671, 1524, 3260, 3015, 2718, + 1436, 4393, 4051, 3480, 2230, 4054, 2670, 395, 2759, + 4796, 849, 4168, 1575, 4853, 1261, 4275, 1866, 3556, + 3417, 1020, 4282, 584, 3689, 3874, 1509, 4083, 263, + 1550, 171, 3186, 1466, 1336, 4936, 3512, 2418, 944, + 325, 1694, 930, 2377, 1839, 621, 2680, 2742, 1537, + 4859, 1103, 3522, 1157, 158, 4629, 2357, 873, 4934, + 2882, 1458, 3703, 572, 1916, 2812, 1567, 1471, 1134, + 673, 1170, 2394, 135, 1008, 3492, 716, 2043, 4892, + 1753, 1218, 680, 2404, 2996, 3897, 4680, 298, 3550, + 1169, 883, 1691, 2497, 4937, 4137, 2804, 4987, 4765, + 1784, 3581, 2966, 4679, 4779, 60, 1363, 4249, 709, + 3283, 2433, 962, 3692, 1587, 4377, 2820]), + values=tensor([0.9574, 0.0088, 0.5020, 0.9141, 0.2863, 0.0911, 0.1607, + 0.4081, 0.1489, 0.0577, 0.6602, 0.5319, 0.6687, 0.7359, + 0.7218, 0.5265, 0.2843, 0.5255, 0.7975, 0.9675, 0.4955, + 0.9458, 0.7420, 0.1283, 0.0140, 0.5968, 0.2693, 0.9592, + 0.8530, 0.7750, 0.2021, 0.3487, 0.7218, 0.6129, 0.8420, + 0.1328, 0.0258, 0.3482, 0.2496, 0.9070, 0.1335, 0.8930, + 0.3961, 0.0685, 0.4593, 0.3228, 0.0085, 0.1698, 0.1363, + 0.2353, 0.4054, 0.4337, 0.7557, 0.8715, 0.1886, 0.6545, + 0.5162, 0.7325, 0.3336, 0.6877, 0.8204, 0.5811, 0.3075, + 0.6798, 0.4051, 0.0597, 0.5326, 0.8458, 0.4272, 0.2826, + 0.4719, 0.5396, 0.3388, 0.9973, 0.4187, 0.6234, 0.2698, + 0.3492, 0.8857, 
0.1489, 0.1998, 0.2289, 0.4451, 0.0379, + 0.1988, 0.2113, 0.3738, 0.7193, 0.5213, 0.9072, 0.0613, + 0.4005, 0.3523, 0.0709, 0.5596, 0.7335, 0.6383, 0.0887, + 0.5692, 0.4603, 0.6272, 0.2553, 0.8985, 0.3462, 0.0407, + 0.6936, 0.4412, 0.0627, 0.2562, 0.5155, 0.3465, 0.4292, + 0.4385, 0.0812, 0.3872, 0.5207, 0.2559, 0.2581, 0.6221, + 0.7181, 0.1019, 0.8605, 0.1756, 0.2609, 0.7394, 0.4792, + 0.5099, 0.8831, 0.7934, 0.9746, 0.6748, 0.9066, 0.6080, + 0.5057, 0.1054, 0.3619, 0.1974, 0.9928, 0.4111, 0.7540, + 0.7143, 0.9147, 0.9579, 0.7958, 0.4523, 0.7894, 0.2118, + 0.3648, 0.9673, 0.5837, 0.0431, 0.7582, 0.2735, 0.6036, + 0.6216, 0.5076, 0.9183, 0.8897, 0.4081, 0.7880, 0.2381, + 0.5085, 0.3796, 0.6662, 0.3146, 0.0575, 0.2385, 0.6086, + 0.9934, 0.6888, 0.1889, 0.0438, 0.3261, 0.3882, 0.4169, + 0.8627, 0.9997, 0.2070, 0.7356, 0.5145, 0.1752, 0.6555, + 0.6684, 0.9501, 0.6473, 0.8531, 0.7478, 0.1401, 0.2317, + 0.3747, 0.6467, 0.8854, 0.0360, 0.9037, 0.4674, 0.5830, + 0.9597, 0.0900, 0.4875, 0.2138, 0.3988, 0.5880, 0.0152, + 0.7769, 0.9566, 0.4429, 0.9222, 0.4459, 0.5489, 0.2798, + 0.1520, 0.0578, 0.0988, 0.1282, 0.5238, 0.4828, 0.8259, + 0.8455, 0.5457, 0.6118, 0.8302, 0.6716, 0.4292, 0.3306, + 0.7331, 0.1640, 0.1078, 0.2534, 0.3387, 0.7022, 0.6433, + 0.1056, 0.7198, 0.6256, 0.4771, 0.9207, 0.9076, 0.7974, + 0.8755, 0.5354, 0.1002, 0.2943, 0.2911, 0.1894, 0.3903, + 0.1589, 0.3357, 0.6754, 0.9423, 0.7719]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4104, 0.7044, 0.9040, ..., 0.0726, 0.3479, 0.6465]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.336281061172485 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1127, 2914, 651, 3868, 3478, 2616, 630, 2816, 2915, + 3943, 3548, 2263, 4542, 4912, 1799, 1521, 4830, 1959, + 565, 4046, 352, 708, 388, 1948, 1601, 580, 4884, + 1273, 2391, 2767, 3934, 3369, 4073, 1550, 863, 4309, + 666, 1592, 2221, 566, 3749, 4816, 269, 465, 352, + 1056, 3923, 2996, 3908, 4028, 1098, 3401, 2466, 323, + 4554, 3723, 4598, 3095, 2628, 4872, 2114, 3081, 3773, + 3425, 1731, 1262, 1917, 2900, 2481, 4272, 4608, 2685, + 4012, 3949, 546, 721, 2719, 2060, 3934, 2047, 319, + 1177, 4368, 590, 919, 2939, 1268, 3254, 3134, 888, + 658, 3560, 3243, 4771, 1870, 2190, 3032, 1145, 3921, + 3093, 240, 195, 4761, 94, 4383, 2739, 425, 1280, + 2618, 2549, 2332, 4924, 2783, 3566, 338, 1395, 3128, + 1333, 3138, 4314, 4739, 2917, 1017, 709, 300, 2533, + 3360, 999, 395, 2920, 889, 1982, 4806, 1821, 1887, + 3776, 1083, 112, 254, 1671, 1524, 3260, 3015, 2718, + 1436, 4393, 4051, 3480, 2230, 4054, 2670, 395, 2759, + 4796, 849, 4168, 1575, 4853, 1261, 4275, 1866, 3556, + 3417, 1020, 4282, 584, 3689, 3874, 1509, 4083, 263, + 1550, 171, 3186, 1466, 1336, 4936, 3512, 2418, 944, + 325, 1694, 930, 2377, 1839, 621, 2680, 2742, 1537, + 4859, 1103, 3522, 1157, 158, 4629, 2357, 873, 4934, + 2882, 1458, 3703, 572, 1916, 2812, 1567, 1471, 1134, + 673, 1170, 2394, 135, 1008, 3492, 716, 2043, 4892, + 1753, 1218, 680, 2404, 2996, 3897, 4680, 298, 3550, + 1169, 883, 1691, 2497, 4937, 4137, 2804, 4987, 4765, + 1784, 3581, 2966, 4679, 4779, 60, 1363, 4249, 709, + 3283, 2433, 962, 3692, 1587, 4377, 2820]), + values=tensor([0.9574, 0.0088, 0.5020, 0.9141, 0.2863, 0.0911, 0.1607, + 0.4081, 0.1489, 0.0577, 0.6602, 0.5319, 0.6687, 0.7359, + 0.7218, 0.5265, 0.2843, 0.5255, 0.7975, 0.9675, 0.4955, + 0.9458, 0.7420, 0.1283, 0.0140, 0.5968, 0.2693, 0.9592, + 0.8530, 0.7750, 0.2021, 0.3487, 0.7218, 0.6129, 0.8420, + 0.1328, 0.0258, 0.3482, 0.2496, 0.9070, 0.1335, 0.8930, + 0.3961, 0.0685, 0.4593, 0.3228, 0.0085, 0.1698, 0.1363, + 0.2353, 0.4054, 0.4337, 0.7557, 0.8715, 0.1886, 0.6545, + 0.5162, 0.7325, 0.3336, 0.6877, 0.8204, 0.5811, 0.3075, + 0.6798, 0.4051, 0.0597, 0.5326, 0.8458, 0.4272, 0.2826, + 0.4719, 0.5396, 0.3388, 0.9973, 0.4187, 0.6234, 0.2698, + 0.3492, 0.8857, 0.1489, 0.1998, 0.2289, 0.4451, 0.0379, + 0.1988, 0.2113, 0.3738, 0.7193, 0.5213, 0.9072, 0.0613, + 0.4005, 0.3523, 0.0709, 0.5596, 0.7335, 0.6383, 0.0887, + 0.5692, 0.4603, 0.6272, 0.2553, 0.8985, 0.3462, 0.0407, + 0.6936, 0.4412, 0.0627, 0.2562, 0.5155, 0.3465, 0.4292, + 0.4385, 0.0812, 0.3872, 0.5207, 0.2559, 0.2581, 0.6221, + 0.7181, 0.1019, 0.8605, 0.1756, 0.2609, 0.7394, 0.4792, + 0.5099, 0.8831, 0.7934, 0.9746, 0.6748, 0.9066, 0.6080, + 0.5057, 0.1054, 0.3619, 0.1974, 0.9928, 0.4111, 0.7540, + 0.7143, 0.9147, 0.9579, 0.7958, 0.4523, 0.7894, 0.2118, + 0.3648, 0.9673, 0.5837, 0.0431, 0.7582, 0.2735, 0.6036, + 0.6216, 0.5076, 0.9183, 0.8897, 0.4081, 0.7880, 0.2381, + 0.5085, 0.3796, 0.6662, 0.3146, 0.0575, 0.2385, 0.6086, + 0.9934, 0.6888, 0.1889, 0.0438, 0.3261, 0.3882, 0.4169, + 0.8627, 0.9997, 0.2070, 0.7356, 0.5145, 0.1752, 0.6555, + 0.6684, 0.9501, 0.6473, 0.8531, 0.7478, 0.1401, 0.2317, + 0.3747, 0.6467, 0.8854, 0.0360, 0.9037, 0.4674, 0.5830, + 0.9597, 0.0900, 0.4875, 0.2138, 0.3988, 0.5880, 0.0152, + 0.7769, 0.9566, 0.4429, 0.9222, 0.4459, 0.5489, 0.2798, + 0.1520, 0.0578, 0.0988, 0.1282, 0.5238, 0.4828, 0.8259, + 0.8455, 0.5457, 0.6118, 0.8302, 0.6716, 0.4292, 0.3306, + 0.7331, 0.1640, 0.1078, 0.2534, 0.3387, 
0.7022, 0.6433, + 0.1056, 0.7198, 0.6256, 0.4771, 0.9207, 0.9076, 0.7974, + 0.8755, 0.5354, 0.1002, 0.2943, 0.2911, 0.1894, 0.3903, + 0.1589, 0.3357, 0.6754, 0.9423, 0.7719]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4104, 0.7044, 0.9040, ..., 0.0726, 0.3479, 0.6465]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.336281061172485 seconds + +[20.72, 20.68, 20.72, 20.88, 21.04, 21.0, 21.04, 20.76, 20.76, 20.8] +[20.88, 21.0, 21.28, 23.68, 24.48, 25.8, 26.4, 26.0, 24.92, 23.92, 24.12, 24.16, 24.24] +13.599457740783691 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 289284, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.336281061172485, 'TIME_S_1KI': 0.035730566022222056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 302.8508556365967, 'W': 22.269333190276484} +[20.72, 20.68, 20.72, 20.88, 21.04, 21.0, 21.04, 20.76, 20.76, 20.8, 21.04, 20.92, 20.64, 20.48, 20.48, 20.48, 20.68, 20.96, 21.16, 21.16] +374.53999999999996 +18.726999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 289284, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.336281061172485, 'TIME_S_1KI': 0.035730566022222056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 302.8508556365967, 'W': 22.269333190276484, 'J_1KI': 1.0468980504853247, 'W_1KI': 0.07698086721103305, 'W_D': 3.542333190276487, 'J_D': 48.17381052494056, 'W_D_1KI': 0.012245174950140648, 'J_D_1KI': 4.232925066765064e-05} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json index e1dfb7e..4c97264 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6038, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.267276763916016, "TIME_S_1KI": 1.7004433196283564, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 856.7928693008422, "W": 66.02, "J_1KI": 141.9001108481024, "W_1KI": 10.934084133819145, "W_D": 30.906499999999994, "J_D": 401.09767971897116, "W_D_1KI": 5.1186651209009595, "J_D_1KI": 0.8477418219445113} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6154, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661418676376343, "TIME_S_1KI": 1.732437223980556, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 888.1215419101716, "W": 66.29, "J_1KI": 144.31614265683646, "W_1KI": 10.771855703607411, "W_D": 31.486250000000005, "J_D": 421.8376361286641, "W_D_1KI": 5.116387715307118, "J_D_1KI": 0.8313922189319334} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output index ba4be77..3c6753a 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.7388477325439453} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.7059962749481201} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 28, ..., 999983, - 999988, 1000000]), - col_indices=tensor([ 2300, 3196, 10757, ..., 92248, 95895, 96660]), - values=tensor([0.0937, 0.5944, 0.4639, ..., 0.5292, 0.3684, 0.5963]), +tensor(crow_indices=tensor([ 0, 10, 28, ..., 999981, + 999989, 1000000]), + col_indices=tensor([10839, 13780, 19162, ..., 70763, 71204, 84111]), + values=tensor([0.3862, 0.3703, 0.4692, ..., 0.8959, 0.7094, 0.8230]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9252, 0.4601, 0.1039, ..., 0.3841, 0.9664, 0.4740]) +tensor([0.6738, 0.0568, 0.8510, ..., 0.5567, 0.5192, 0.1431]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 1.7388477325439453 seconds +Time: 1.7059962749481201 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6038', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.267276763916016} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6154', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661418676376343} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 25, ..., 999976, - 999990, 1000000]), - col_indices=tensor([ 7337, 9006, 37341, ..., 86240, 86867, 93776]), - values=tensor([0.1177, 0.4165, 0.7590, ..., 0.7494, 0.7065, 0.3766]), +tensor(crow_indices=tensor([ 0, 19, 22, ..., 999983, + 999992, 1000000]), + col_indices=tensor([ 4495, 11307, 13629, ..., 46229, 59792, 89876]), + values=tensor([0.8364, 0.7832, 0.5169, ..., 0.6963, 0.9299, 0.6811]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1410, 0.1591, 0.3967, ..., 0.8959, 0.7085, 0.3739]) +tensor([0.2491, 0.3919, 0.1225, ..., 0.6201, 0.7425, 0.7393]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.267276763916016 seconds +Time: 10.661418676376343 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 25, ..., 999976, - 999990, 1000000]), - col_indices=tensor([ 7337, 9006, 37341, ..., 86240, 86867, 93776]), - values=tensor([0.1177, 0.4165, 0.7590, ..., 0.7494, 0.7065, 0.3766]), +tensor(crow_indices=tensor([ 0, 19, 22, ..., 999983, + 999992, 1000000]), + col_indices=tensor([ 4495, 11307, 13629, ..., 46229, 59792, 89876]), + values=tensor([0.8364, 0.7832, 0.5169, ..., 0.6963, 0.9299, 0.6811]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1410, 0.1591, 0.3967, ..., 0.8959, 0.7085, 0.3739]) +tensor([0.2491, 0.3919, 0.1225, ..., 0.6201, 0.7425, 0.7393]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.267276763916016 seconds +Time: 10.661418676376343 seconds -[39.74, 38.35, 38.46, 38.36, 38.8, 38.69, 39.25, 38.82, 38.81, 38.76] -[66.02] -12.977777481079102 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6038, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.267276763916016, 'TIME_S_1KI': 1.7004433196283564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.7928693008422, 'W': 66.02} -[39.74, 38.35, 38.46, 38.36, 38.8, 38.69, 39.25, 38.82, 38.81, 38.76, 39.02, 38.96, 38.6, 38.78, 38.41, 38.79, 38.39, 38.7, 43.94, 38.8] -702.27 -35.1135 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6038, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.267276763916016, 'TIME_S_1KI': 1.7004433196283564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.7928693008422, 'W': 66.02, 'J_1KI': 141.9001108481024, 'W_1KI': 10.934084133819145, 'W_D': 30.906499999999994, 'J_D': 401.09767971897116, 'W_D_1KI': 5.1186651209009595, 'J_D_1KI': 0.8477418219445113} +[39.48, 39.2, 39.2, 38.44, 38.38, 38.41, 38.43, 38.34, 38.9, 38.82] +[66.29] +13.3975191116333 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6154, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 
'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661418676376343, 'TIME_S_1KI': 1.732437223980556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.1215419101716, 'W': 66.29} +[39.48, 39.2, 39.2, 38.44, 38.38, 38.41, 38.43, 38.34, 38.9, 38.82, 39.78, 38.22, 38.35, 38.27, 38.77, 38.41, 38.95, 38.84, 38.8, 38.25] +696.075 +34.80375 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6154, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661418676376343, 'TIME_S_1KI': 1.732437223980556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.1215419101716, 'W': 66.29, 'J_1KI': 144.31614265683646, 'W_1KI': 10.771855703607411, 'W_D': 31.486250000000005, 'J_D': 421.8376361286641, 'W_D_1KI': 5.116387715307118, 'J_D_1KI': 0.8313922189319334} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..28fc497 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 16.51292133331299, "TIME_S_1KI": 16.51292133331299, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1675.715212612152, "W": 77.36, "J_1KI": 1675.715212612152, "W_1KI": 77.36, "W_D": 42.0475, "J_D": 910.8019054073095, "W_D_1KI": 42.0475, "J_D_1KI": 42.0475} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..d1f2742 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 16.51292133331299} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 85, 184, ..., 9999802, + 9999894, 10000000]), + col_indices=tensor([ 1647, 2383, 2584, ..., 98263, 98777, 99734]), + values=tensor([0.1681, 0.5843, 0.2619, ..., 0.7600, 0.0011, 0.9501]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3789, 0.7363, 0.6915, ..., 0.8879, 0.6465, 0.7586]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 16.51292133331299 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 85, 184, ..., 9999802, + 9999894, 10000000]), + col_indices=tensor([ 1647, 2383, 2584, ..., 98263, 98777, 99734]), + values=tensor([0.1681, 0.5843, 0.2619, ..., 0.7600, 0.0011, 0.9501]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3789, 0.7363, 0.6915, ..., 0.8879, 0.6465, 0.7586]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 16.51292133331299 seconds + +[39.67, 38.57, 40.75, 44.07, 38.52, 39.19, 39.95, 38.76, 38.89, 38.37] +[77.36] +21.661261796951294 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 16.51292133331299, 'TIME_S_1KI': 16.51292133331299, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1675.715212612152, 'W': 77.36} +[39.67, 38.57, 40.75, 44.07, 38.52, 39.19, 39.95, 38.76, 38.89, 38.37, 39.54, 38.41, 38.76, 38.62, 39.0, 38.84, 38.88, 38.44, 38.6, 38.42] +706.25 +35.3125 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 16.51292133331299, 'TIME_S_1KI': 16.51292133331299, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1675.715212612152, 'W': 77.36, 'J_1KI': 1675.715212612152, 'W_1KI': 77.36, 'W_D': 42.0475, 'J_D': 910.8019054073095, 'W_D_1KI': 42.0475, 'J_D_1KI': 42.0475} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json index fbd54c6..8f7419f 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 12169, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.374937295913696, "TIME_S_1KI": 0.8525710654871967, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 840.8859812259674, "W": 64.58, "J_1KI": 69.10066408299511, "W_1KI": 5.306927438573425, "W_D": 29.637499999999996, "J_D": 385.90520700812334, "W_D_1KI": 
2.4354918234859064, "J_D_1KI": 0.20013902732236885} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 12077, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.322007656097412, "TIME_S_1KI": 0.8546830881922176, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 838.5464090538024, "W": 64.47, "J_1KI": 69.43333684307382, "W_1KI": 5.3382462532085775, "W_D": 29.621750000000006, "J_D": 385.28326496648793, "W_D_1KI": 2.452740746874224, "J_D_1KI": 0.20309188928328428} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output index cc678f8..d689d8c 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.8628060817718506} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.8693974018096924} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 100000, 100000]), - col_indices=tensor([15542, 51530, 32014, ..., 17183, 69417, 75150]), - values=tensor([0.6948, 0.1030, 0.8530, ..., 0.6511, 0.2631, 0.7718]), + col_indices=tensor([57795, 90642, 37628, ..., 28610, 559, 98027]), + values=tensor([0.1696, 0.5341, 0.5606, ..., 0.7529, 0.5749, 0.6066]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.3720, 0.8026, 0.8839, ..., 0.1725, 0.9607, 0.0788]) +tensor([0.7238, 0.7083, 0.7900, ..., 0.2093, 0.5825, 0.4482]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.8628060817718506 seconds +Time: 0.8693974018096924 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12169', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.374937295913696} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12077', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.322007656097412} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 99998, 99998, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, 100000]), - col_indices=tensor([38450, 44184, 11395, ..., 3206, 2272, 42747]), - values=tensor([0.8156, 0.6388, 0.3060, ..., 0.5932, 0.6977, 0.4008]), + col_indices=tensor([ 3486, 41765, 3206, ..., 33238, 50080, 42417]), + values=tensor([0.6049, 0.2829, 0.2416, ..., 0.9238, 0.5292, 0.5723]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7706, 0.5998, 0.9728, ..., 0.9827, 0.6551, 0.5654]) +tensor([0.4597, 0.0749, 0.9185, ..., 0.4582, 0.2319, 0.2322]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.374937295913696 seconds +Time: 10.322007656097412 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 99998, 99998, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, 100000]), - col_indices=tensor([38450, 44184, 11395, ..., 3206, 2272, 42747]), - values=tensor([0.8156, 0.6388, 0.3060, ..., 0.5932, 0.6977, 0.4008]), + col_indices=tensor([ 3486, 41765, 3206, ..., 33238, 50080, 42417]), + values=tensor([0.6049, 0.2829, 0.2416, ..., 0.9238, 0.5292, 0.5723]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7706, 0.5998, 0.9728, ..., 0.9827, 0.6551, 0.5654]) +tensor([0.4597, 0.0749, 0.9185, ..., 0.4582, 0.2319, 0.2322]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.374937295913696 seconds +Time: 10.322007656097412 seconds -[39.65, 38.92, 38.96, 38.86, 38.63, 38.6, 38.47, 38.43, 38.43, 38.83] -[64.58] -13.0208420753479 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12169, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.374937295913696, 'TIME_S_1KI': 0.8525710654871967, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.8859812259674, 'W': 64.58} -[39.65, 38.92, 38.96, 38.86, 38.63, 38.6, 38.47, 38.43, 38.43, 38.83, 40.06, 39.01, 39.04, 38.86, 38.59, 38.53, 38.46, 38.45, 40.01, 38.66] -698.85 -34.9425 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12169, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.374937295913696, 'TIME_S_1KI': 0.8525710654871967, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.8859812259674, 'W': 64.58, 'J_1KI': 69.10066408299511, 'W_1KI': 5.306927438573425, 'W_D': 29.637499999999996, 'J_D': 385.90520700812334, 'W_D_1KI': 2.4354918234859064, 'J_D_1KI': 0.20013902732236885} +[39.73, 38.57, 38.91, 38.66, 38.65, 38.36, 38.94, 39.73, 38.42, 38.47] +[64.47] +13.006769180297852 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.322007656097412, 'TIME_S_1KI': 0.8546830881922176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 838.5464090538024, 'W': 64.47} +[39.73, 38.57, 38.91, 38.66, 38.65, 38.36, 38.94, 39.73, 38.42, 38.47, 39.26, 38.5, 38.85, 38.41, 38.98, 38.35, 38.75, 38.41, 38.37, 38.75] +696.9649999999999 +34.84824999999999 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.322007656097412, 'TIME_S_1KI': 0.8546830881922176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 838.5464090538024, 'W': 64.47, 'J_1KI': 69.43333684307382, 'W_1KI': 5.3382462532085775, 'W_D': 29.621750000000006, 'J_D': 385.28326496648793, 'W_D_1KI': 2.452740746874224, 'J_D_1KI': 0.20309188928328428} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json index 3886b8d..9dc0eed 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 237950, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.479950666427612, "TIME_S_1KI": 0.044042658820876705, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 847.0966372108459, "W": 64.88, "J_1KI": 3.5599774625377005, "W_1KI": 0.2726623240176507, "W_D": 29.35949999999999, "J_D": 383.32820160591587, "W_D_1KI": 0.12338516495061984, "J_D_1KI": 0.0005185339985317077} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 238697, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.480466604232788, "TIME_S_1KI": 0.04390698921324017, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1152.02081230402, "W": 58.89, "J_1KI": 4.826289447726699, "W_1KI": 0.24671445388924035, "W_D": 23.781499999999994, "J_D": 465.21961195123185, "W_D_1KI": 0.09963049388974303, "J_D_1KI": 0.0004173931548772839} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output index 4b3b779..9b8fd23 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.055043935775756836} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05536341667175293} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 10000, 10000]), - col_indices=tensor([2489, 8082, 1798, ..., 7687, 8784, 7173]), - values=tensor([0.0419, 0.2217, 0.5372, ..., 0.9380, 0.6037, 0.5878]), +tensor(crow_indices=tensor([ 0, 1, 3, ..., 9998, 9998, 10000]), + col_indices=tensor([8403, 1214, 9126, ..., 1351, 3891, 9766]), + values=tensor([0.6664, 0.5402, 0.6356, ..., 0.4443, 0.7393, 0.7343]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.6995, 0.5522, 0.6987, ..., 0.2479, 0.0646, 0.0677]) +tensor([0.3881, 0.9820, 0.4323, ..., 0.4549, 0.5025, 0.0926]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.055043935775756836 seconds +Time: 0.05536341667175293 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '190756', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.417458295822144} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '189655', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.342687606811523} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 10000, 10000, 10000]), - col_indices=tensor([6369, 8699, 1454, ..., 3376, 4538, 4463]), - values=tensor([0.7752, 0.1565, 0.1050, ..., 0.8742, 0.0228, 0.3625]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9997, 10000]), + col_indices=tensor([1328, 2584, 2989, ..., 4729, 4835, 7640]), + values=tensor([0.4337, 0.1976, 0.1440, ..., 0.2725, 0.2860, 0.2817]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.2878, 0.2325, 0.9670, ..., 0.8581, 0.8156, 0.4801]) +tensor([0.7295, 0.2766, 0.3418, ..., 0.0114, 0.7550, 0.8307]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 8.417458295822144 seconds +Time: 8.342687606811523 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '237950', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.479950666427612} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '238697', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.480466604232788} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9998, 10000]), - col_indices=tensor([2568, 4888, 9428, ..., 1921, 2148, 9872]), - values=tensor([0.1473, 0.4194, 0.4025, ..., 0.4119, 0.3062, 0.3667]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9996, 9997, 10000]), + col_indices=tensor([9286, 7396, 732, ..., 1484, 5299, 9027]), + values=tensor([0.3440, 0.6043, 0.5062, ..., 0.2355, 0.1186, 0.4561]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9989, 0.1547, 0.2140, ..., 0.5569, 0.3690, 0.8580]) +tensor([0.0996, 0.8226, 0.2068, ..., 0.2572, 0.9962, 0.0083]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.479950666427612 seconds +Time: 10.480466604232788 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9998, 10000]), - col_indices=tensor([2568, 4888, 9428, ..., 1921, 2148, 9872]), - values=tensor([0.1473, 0.4194, 0.4025, ..., 0.4119, 0.3062, 0.3667]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9996, 9997, 10000]), + col_indices=tensor([9286, 7396, 732, ..., 1484, 5299, 9027]), + values=tensor([0.3440, 0.6043, 0.5062, ..., 0.2355, 0.1186, 0.4561]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9989, 0.1547, 0.2140, ..., 0.5569, 0.3690, 0.8580]) +tensor([0.0996, 0.8226, 0.2068, ..., 0.2572, 0.9962, 0.0083]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.479950666427612 seconds +Time: 10.480466604232788 seconds -[40.47, 39.33, 38.5, 38.74, 38.63, 38.5, 43.74, 38.79, 39.18, 38.53] -[64.88] -13.056360006332397 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237950, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.479950666427612, 'TIME_S_1KI': 0.044042658820876705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.0966372108459, 'W': 64.88} -[40.47, 39.33, 38.5, 38.74, 38.63, 38.5, 43.74, 38.79, 39.18, 38.53, 39.54, 38.39, 38.83, 43.63, 38.88, 38.94, 39.01, 39.89, 38.82, 38.68] -710.4100000000001 -35.520500000000006 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237950, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.479950666427612, 'TIME_S_1KI': 0.044042658820876705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.0966372108459, 'W': 64.88, 'J_1KI': 3.5599774625377005, 'W_1KI': 0.2726623240176507, 'W_D': 29.35949999999999, 'J_D': 383.32820160591587, 'W_D_1KI': 0.12338516495061984, 'J_D_1KI': 0.0005185339985317077} +[40.64, 38.78, 39.02, 39.34, 38.69, 38.38, 38.42, 38.49, 38.47, 38.38] +[58.89] +19.562248468399048 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 238697, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.480466604232788, 'TIME_S_1KI': 0.04390698921324017, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1152.02081230402, 'W': 58.89} +[40.64, 38.78, 39.02, 39.34, 38.69, 38.38, 38.42, 38.49, 38.47, 38.38, 44.89, 39.96, 38.48, 39.7, 39.01, 38.35, 38.7, 38.38, 38.58, 38.93] +702.1700000000001 +35.10850000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 238697, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.480466604232788, 'TIME_S_1KI': 0.04390698921324017, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1152.02081230402, 'W': 58.89, 'J_1KI': 4.826289447726699, 'W_1KI': 0.24671445388924035, 'W_D': 23.781499999999994, 'J_D': 465.21961195123185, 'W_D_1KI': 0.09963049388974303, 'J_D_1KI': 0.0004173931548772839} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json index 4c5c61e..80c52ca 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 75505, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.45979928970337, "TIME_S_1KI": 0.15177536970668656, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 869.2745451879501, "W": 66.09, "J_1KI": 11.512807697343886, "W_1KI": 0.8753062711078737, "W_D": 31.095250000000007, "J_D": 408.9924239863158, "W_D_1KI": 0.4118303423614331, "J_D_1KI": 0.00545434530642253} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 75618, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.541181087493896, "TIME_S_1KI": 0.13940042169184447, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 940.5316159009933, "W": 66.47, "J_1KI": 12.437932977611062, "W_1KI": 0.8790235129202042, "W_D": 31.600500000000004, "J_D": 447.1380973112584, "W_D_1KI": 0.41789653257160997, "J_D_1KI": 0.005526416098965987} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output index afe840d..9b49c17 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.15292811393737793} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.16539597511291504} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 10, ..., 99983, 99994, +tensor(crow_indices=tensor([ 0, 9, 17, ..., 99980, 99988, 100000]), - col_indices=tensor([ 267, 3923, 5616, ..., 4271, 7755, 9973]), - values=tensor([0.9283, 0.7846, 0.2151, ..., 0.9447, 0.6120, 0.1119]), + col_indices=tensor([2312, 2519, 3298, ..., 9035, 9400, 9910]), + values=tensor([0.1410, 0.2218, 0.1849, ..., 0.4652, 0.0649, 0.3640]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9192, 0.5849, 0.9579, ..., 0.9586, 0.7879, 0.6201]) +tensor([0.4363, 0.0084, 0.9005, ..., 0.6999, 0.4782, 0.9424]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.15292811393737793 seconds +Time: 0.16539597511291504 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '68659', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.547868490219116} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '63484', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.8150315284729} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 21, ..., 99972, 99985, +tensor(crow_indices=tensor([ 0, 11, 21, ..., 99981, 99989, 100000]), - col_indices=tensor([ 305, 380, 962, ..., 8769, 9180, 9915]), - values=tensor([0.5782, 0.8638, 0.8069, ..., 0.1223, 0.7033, 0.9891]), + col_indices=tensor([ 457, 1232, 2417, ..., 8600, 9856, 9966]), + values=tensor([0.5653, 0.7705, 0.0640, ..., 0.9989, 0.3761, 0.2052]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4135, 0.4566, 0.8532, ..., 0.7837, 0.5944, 0.7679]) +tensor([0.7731, 0.4840, 0.8355, ..., 0.4086, 0.2552, 0.3939]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 9.547868490219116 seconds +Time: 8.8150315284729 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75505', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.45979928970337} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75618', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.541181087493896} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 21, ..., 99982, 99992, +tensor(crow_indices=tensor([ 0, 8, 15, ..., 99981, 99989, 100000]), - col_indices=tensor([1022, 1138, 1407, ..., 6223, 7233, 9402]), - values=tensor([0.9484, 0.5958, 0.7782, ..., 0.0863, 0.6723, 0.0562]), + col_indices=tensor([ 812, 4021, 6538, ..., 8729, 9196, 9676]), + values=tensor([0.8795, 0.6481, 0.9606, ..., 0.0277, 0.7911, 0.3727]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0389, 0.1147, 0.5260, ..., 0.1033, 0.1694, 0.2810]) +tensor([0.1908, 0.4704, 0.2059, ..., 0.1529, 0.3275, 0.9276]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 11.45979928970337 seconds +Time: 10.541181087493896 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
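Across the three invocations in this file, the iteration count is recalibrated from 1000 toward a run of at least BASELINE_TIME_S = 10 seconds: 1000 iterations take 0.1654 s, which projects to 63484 iterations for roughly 10.5 s; that run finishes in 8.815 s, still under 10 s, so it is rescaled once more to 75618 and accepted at 10.54 s. A minimal sketch of that loop, assuming a 10.5 s target and truncating division (both inferred from the logged counts); calibrate and run_spmv are illustrative names, not functions from spmv.py.

# Hypothetical sketch of the iteration-count calibration visible in these logs.
def calibrate(run_spmv, target_s=10.5, min_s=10.0, start=1000):
    """run_spmv(n) must return the measured TIME_S for n multiplications."""
    iterations = start
    time_s = run_spmv(iterations)                 # 1000 it  -> 0.16540 s
    while time_s < min_s:                         # under BASELINE_TIME_S: retry
        iterations = int(target_s * iterations / time_s)
        time_s = run_spmv(iterations)             # 63484 it -> 8.8150 s, then
    return iterations, time_s                     # 75618 it -> 10.5412 s: accept

The same rule reproduces the other records in this section: 1000 iterations at density 0.01 take 1.0402 s, giving int(10.5 * 1000 / 1.0402) = 10094; at density 0.05, 5.9725 s gives 1758; and at density 0.1 the first run already exceeds 10 s, so ITERATIONS stays 1000.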
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 21, ..., 99982, 99992, +tensor(crow_indices=tensor([ 0, 8, 15, ..., 99981, 99989, 100000]), - col_indices=tensor([1022, 1138, 1407, ..., 6223, 7233, 9402]), - values=tensor([0.9484, 0.5958, 0.7782, ..., 0.0863, 0.6723, 0.0562]), + col_indices=tensor([ 812, 4021, 6538, ..., 8729, 9196, 9676]), + values=tensor([0.8795, 0.6481, 0.9606, ..., 0.0277, 0.7911, 0.3727]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0389, 0.1147, 0.5260, ..., 0.1033, 0.1694, 0.2810]) +tensor([0.1908, 0.4704, 0.2059, ..., 0.1529, 0.3275, 0.9276]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 11.45979928970337 seconds +Time: 10.541181087493896 seconds -[39.42, 38.37, 38.5, 39.45, 39.07, 38.36, 39.56, 38.91, 39.08, 38.45] -[66.09] -13.152890682220459 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75505, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.45979928970337, 'TIME_S_1KI': 0.15177536970668656, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 869.2745451879501, 'W': 66.09} -[39.42, 38.37, 38.5, 39.45, 39.07, 38.36, 39.56, 38.91, 39.08, 38.45, 39.1, 38.46, 38.46, 39.62, 38.45, 39.31, 38.8, 38.73, 38.89, 38.78] -699.895 -34.994749999999996 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75505, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.45979928970337, 'TIME_S_1KI': 0.15177536970668656, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 869.2745451879501, 'W': 66.09, 'J_1KI': 11.512807697343886, 'W_1KI': 0.8753062711078737, 'W_D': 31.095250000000007, 'J_D': 408.9924239863158, 'W_D_1KI': 0.4118303423614331, 'J_D_1KI': 0.00545434530642253} +[38.98, 38.52, 38.36, 38.43, 38.45, 38.57, 38.52, 38.55, 38.55, 38.4] +[66.47] +14.149715900421143 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75618, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.541181087493896, 'TIME_S_1KI': 0.13940042169184447, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.5316159009933, 'W': 66.47} +[38.98, 38.52, 38.36, 38.43, 38.45, 38.57, 38.52, 38.55, 38.55, 38.4, 39.0, 38.48, 38.51, 39.07, 38.73, 38.62, 38.94, 38.66, 38.36, 43.76] +697.3899999999999 +34.869499999999995 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75618, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.541181087493896, 'TIME_S_1KI': 0.13940042169184447, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.5316159009933, 'W': 66.47, 'J_1KI': 12.437932977611062, 'W_1KI': 0.8790235129202042, 'W_D': 31.600500000000004, 'J_D': 447.1380973112584, 'W_D_1KI': 0.41789653257160997, 'J_D_1KI': 0.005526416098965987} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json index 4467311..e580351 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 10051, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.396085023880005, "TIME_S_1KI": 1.0343334020376087, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 865.9663576483727, "W": 66.05, "J_1KI": 86.1572338720896, "W_1KI": 6.571485424335887, "W_D": 30.772, "J_D": 403.44461404323573, "W_D_1KI": 3.061585911849567, "J_D_1KI": 0.3046051051486983} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 10094, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.424894571304321, "TIME_S_1KI": 1.0327813127902044, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 874.2127872061731, "W": 66.43, "J_1KI": 86.60717131030047, "W_1KI": 6.581137309292649, "W_D": 31.351250000000007, "J_D": 412.5796122971178, "W_D_1KI": 3.1059292649098484, "J_D_1KI": 0.3077005414018078} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output index ec9215b..c679aff 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.0446221828460693} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.040170669555664} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 93, 186, ..., 999791, - 999885, 1000000]), - col_indices=tensor([ 85, 646, 706, ..., 9852, 9875, 9886]), - values=tensor([0.7433, 0.1282, 0.1316, ..., 0.9681, 0.9495, 0.6187]), +tensor(crow_indices=tensor([ 0, 84, 184, ..., 999814, + 999899, 1000000]), + col_indices=tensor([ 171, 251, 472, ..., 9843, 9880, 9941]), + values=tensor([0.4805, 0.3615, 0.2747, ..., 0.6607, 0.4074, 0.0301]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.5006, 0.3207, 0.7634, ..., 0.1693, 0.2023, 0.9705]) +tensor([0.4780, 0.2256, 0.5818, ..., 0.3209, 0.4621, 0.5747]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 1.0446221828460693 seconds +Time: 1.040170669555664 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10051', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.396085023880005} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10094', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.424894571304321} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 82, 194, ..., 999830, - 999924, 1000000]), - col_indices=tensor([ 207, 248, 391, ..., 9735, 9842, 9886]), - values=tensor([0.2382, 0.1304, 0.8275, ..., 0.9132, 0.3101, 0.1677]), +tensor(crow_indices=tensor([ 0, 106, 205, ..., 999816, + 999911, 1000000]), + col_indices=tensor([ 83, 89, 669, ..., 9640, 9974, 9983]), + values=tensor([0.6432, 0.8453, 0.7190, ..., 0.8302, 0.0770, 0.7390]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0491, 0.4304, 0.0195, ..., 0.4012, 0.5324, 0.0059]) +tensor([0.4670, 0.3263, 0.5346, ..., 0.9779, 0.3626, 0.9957]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.396085023880005 seconds +Time: 10.424894571304321 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 82, 194, ..., 999830, - 999924, 1000000]), - col_indices=tensor([ 207, 248, 391, ..., 9735, 9842, 9886]), - values=tensor([0.2382, 0.1304, 0.8275, ..., 0.9132, 0.3101, 0.1677]), +tensor(crow_indices=tensor([ 0, 106, 205, ..., 999816, + 999911, 1000000]), + col_indices=tensor([ 83, 89, 669, ..., 9640, 9974, 9983]), + values=tensor([0.6432, 0.8453, 0.7190, ..., 0.8302, 0.0770, 0.7390]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0491, 0.4304, 0.0195, ..., 0.4012, 0.5324, 0.0059]) +tensor([0.4670, 0.3263, 0.5346, ..., 0.9779, 0.3626, 0.9957]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.396085023880005 seconds +Time: 10.424894571304321 seconds -[40.88, 38.45, 38.83, 38.72, 44.02, 38.37, 38.57, 38.7, 39.57, 38.48] -[66.05] -13.110769987106323 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10051, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.396085023880005, 'TIME_S_1KI': 1.0343334020376087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 865.9663576483727, 'W': 66.05} -[40.88, 38.45, 38.83, 38.72, 44.02, 38.37, 38.57, 38.7, 39.57, 38.48, 42.6, 39.8, 38.39, 38.73, 39.03, 38.72, 38.63, 38.42, 38.48, 38.3] -705.56 -35.278 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10051, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.396085023880005, 'TIME_S_1KI': 1.0343334020376087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 865.9663576483727, 'W': 66.05, 'J_1KI': 86.1572338720896, 'W_1KI': 6.571485424335887, 'W_D': 30.772, 'J_D': 403.44461404323573, 'W_D_1KI': 3.061585911849567, 'J_D_1KI': 0.3046051051486983} +[39.35, 40.11, 43.57, 38.24, 39.46, 38.59, 38.37, 38.38, 38.28, 38.71] +[66.43] +13.15990948677063 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10094, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.424894571304321, 'TIME_S_1KI': 1.0327813127902044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.2127872061731, 'W': 66.43} +[39.35, 40.11, 43.57, 38.24, 39.46, 38.59, 38.37, 38.38, 38.28, 38.71, 40.11, 38.48, 38.37, 38.48, 38.37, 38.22, 38.39, 39.48, 38.33, 38.74] +701.575 +35.07875 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10094, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.424894571304321, 'TIME_S_1KI': 1.0327813127902044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.2127872061731, 'W': 66.43, 'J_1KI': 86.60717131030047, 'W_1KI': 6.581137309292649, 'W_D': 31.351250000000007, 'J_D': 412.5796122971178, 'W_D_1KI': 3.1059292649098484, 'J_D_1KI': 0.3077005414018078} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json index 9a92bbf..73d3b47 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1760, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.496549844741821, "TIME_S_1KI": 5.963948775421489, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1063.6957297325134, "W": 75.3, "J_1KI": 604.3725737116553, "W_1KI": 42.784090909090914, "W_D": 40.305749999999996, "J_D": 569.3632690393924, "W_D_1KI": 22.900994318181816, "J_D_1KI": 13.011928589876032} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1758, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.496397256851196, "TIME_S_1KI": 5.970646903783388, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1131.7024735164641, "W": 74.58, "J_1KI": 643.7442966532789, "W_1KI": 42.42320819112628, "W_D": 39.617, "J_D": 601.1619320635796, "W_D_1KI": 22.535267349260522, "J_D_1KI": 12.818695875574814} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output index f82f04e..78fc6b7 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.962578296661377} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.972491502761841} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 507, 983, ..., 4998985, - 4999482, 5000000]), - col_indices=tensor([ 6, 14, 63, ..., 9975, 9976, 9988]), - values=tensor([0.1343, 0.9147, 0.2964, ..., 0.8307, 0.6480, 0.1778]), +tensor(crow_indices=tensor([ 0, 519, 993, ..., 4998959, + 4999496, 5000000]), + col_indices=tensor([ 17, 61, 73, ..., 9901, 9911, 9920]), + values=tensor([0.3098, 0.8299, 0.3979, ..., 0.3415, 0.7398, 0.5378]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.4820, 0.9526, 0.2470, ..., 0.0414, 0.1724, 0.7388]) +tensor([0.6888, 0.9764, 0.3608, ..., 0.4208, 0.9222, 0.1586]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 5.962578296661377 seconds +Time: 5.972491502761841 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1760', '-ss', '10000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.496549844741821} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1758', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.496397256851196} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 494, 1024, ..., 4999026, - 4999505, 5000000]), - col_indices=tensor([ 14, 81, 111, ..., 9976, 9994, 9996]), - values=tensor([0.8750, 0.2097, 0.6973, ..., 0.7142, 0.2835, 0.0523]), +tensor(crow_indices=tensor([ 0, 496, 998, ..., 4999016, + 4999498, 5000000]), + col_indices=tensor([ 25, 29, 69, ..., 9894, 9911, 9997]), + values=tensor([0.8031, 0.3187, 0.9076, ..., 0.2949, 0.8412, 0.6618]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6644, 0.2536, 0.5514, ..., 0.5924, 0.6712, 0.0391]) +tensor([0.4316, 0.0196, 0.7556, ..., 0.1123, 0.7172, 0.6330]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.496549844741821 seconds +Time: 10.496397256851196 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 494, 1024, ..., 4999026, - 4999505, 5000000]), - col_indices=tensor([ 14, 81, 111, ..., 9976, 9994, 9996]), - values=tensor([0.8750, 0.2097, 0.6973, ..., 0.7142, 0.2835, 0.0523]), +tensor(crow_indices=tensor([ 0, 496, 998, ..., 4999016, + 4999498, 5000000]), + col_indices=tensor([ 25, 29, 69, ..., 9894, 9911, 9997]), + values=tensor([0.8031, 0.3187, 0.9076, ..., 0.2949, 0.8412, 0.6618]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6644, 0.2536, 0.5514, ..., 0.5924, 0.6712, 0.0391]) +tensor([0.4316, 0.0196, 0.7556, ..., 0.1123, 0.7172, 0.6330]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.496549844741821 seconds +Time: 10.496397256851196 seconds -[39.19, 39.0, 39.0, 38.91, 38.55, 38.97, 38.49, 38.52, 38.9, 38.82] -[75.3] -14.126105308532715 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.496549844741821, 'TIME_S_1KI': 5.963948775421489, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1063.6957297325134, 'W': 75.3} -[39.19, 39.0, 39.0, 38.91, 38.55, 38.97, 38.49, 38.52, 38.9, 38.82, 39.55, 38.59, 39.72, 38.89, 38.53, 38.82, 38.62, 39.33, 38.84, 38.85] -699.885 -34.99425 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.496549844741821, 'TIME_S_1KI': 5.963948775421489, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1063.6957297325134, 'W': 75.3, 'J_1KI': 604.3725737116553, 'W_1KI': 42.784090909090914, 'W_D': 40.305749999999996, 'J_D': 569.3632690393924, 'W_D_1KI': 22.900994318181816, 'J_D_1KI': 13.011928589876032} +[39.62, 38.84, 38.53, 38.4, 38.53, 38.95, 38.67, 38.93, 38.95, 38.34] +[74.58] +15.174342632293701 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.496397256851196, 'TIME_S_1KI': 5.970646903783388, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1131.7024735164641, 'W': 74.58} +[39.62, 38.84, 38.53, 38.4, 38.53, 38.95, 38.67, 38.93, 38.95, 38.34, 39.14, 38.64, 38.84, 38.7, 38.86, 38.26, 38.63, 38.26, 38.4, 44.64] +699.26 +34.963 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.496397256851196, 'TIME_S_1KI': 5.970646903783388, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1131.7024735164641, 'W': 74.58, 'J_1KI': 643.7442966532789, 'W_1KI': 42.42320819112628, 'W_D': 39.617, 'J_D': 601.1619320635796, 'W_D_1KI': 22.535267349260522, 'J_D_1KI': 12.818695875574814} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..6bba238 --- /dev/null +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 14.862756252288818, "TIME_S_1KI": 14.862756252288818, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1509.8221988201142, "W": 77.42, "J_1KI": 1509.8221988201142, "W_1KI": 77.42, "W_D": 42.230500000000006, "J_D": 823.5668608534337, "W_D_1KI": 42.230500000000006, "J_D_1KI": 42.230500000000006} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..550e30b --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 14.862756252288818} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1022, 2054, ..., 9998064, + 9999016, 10000000]), + col_indices=tensor([ 8, 12, 13, ..., 9969, 9975, 9983]), + values=tensor([0.6048, 0.0895, 0.3093, ..., 0.2729, 0.9589, 0.2791]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8686, 0.6857, 0.7903, ..., 0.7591, 0.3670, 0.6215]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 14.862756252288818 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1022, 2054, ..., 9998064, + 9999016, 10000000]), + col_indices=tensor([ 8, 12, 13, ..., 9969, 9975, 9983]), + values=tensor([0.6048, 0.0895, 0.3093, ..., 0.2729, 0.9589, 0.2791]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8686, 0.6857, 0.7903, ..., 0.7591, 0.3670, 0.6215]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 14.862756252288818 seconds + +[44.78, 38.35, 39.08, 38.71, 38.47, 39.2, 39.52, 39.83, 39.49, 40.27] +[77.42] +19.501707553863525 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 14.862756252288818, 'TIME_S_1KI': 14.862756252288818, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1509.8221988201142, 'W': 77.42} +[44.78, 38.35, 39.08, 38.71, 38.47, 39.2, 39.52, 39.83, 39.49, 40.27, 40.17, 38.57, 38.51, 38.69, 38.5, 38.97, 38.48, 38.73, 38.88, 38.4] +703.79 +35.189499999999995 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 14.862756252288818, 'TIME_S_1KI': 14.862756252288818, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1509.8221988201142, 'W': 77.42, 'J_1KI': 1509.8221988201142, 'W_1KI': 77.42, 'W_D': 42.230500000000006, 'J_D': 823.5668608534337, 'W_D_1KI': 42.230500000000006, 'J_D_1KI': 42.230500000000006} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json index 5067ef9..721cd64 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 363782, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.585279941558838, "TIME_S_1KI": 0.029097866143896176, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.3318356752395, "W": 64.69, "J_1KI": 2.3512208841428097, "W_1KI": 0.17782628057462985, "W_D": 29.81474999999999, "J_D": 394.2109266918896, "W_D_1KI": 0.08195773842576046, "J_D_1KI": 0.00022529355060382441} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 361507, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.549095392227173, "TIME_S_1KI": 0.029180888315377497, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 862.0974852204322, "W": 65.27, "J_1KI": 2.3847324815852313, "W_1KI": 0.18054975422329303, "W_D": 30.177249999999994, "J_D": 398.58635415762654, "W_D_1KI": 0.08347625357185336, "J_D_1KI": 0.0002309118594435332} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output index 198dd99..4ba6f6a 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,266 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08438587188720703} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.043045759201049805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 654, 6772, 3481, 2535, 125, 3792, 8070, 9757, 6184, - 2668, 22, 1611, 8038, 5477, 9185, 5993, 9592, 3939, - 8639, 7553, 398, 5715, 4399, 2570, 9973, 3035, 1537, - 7004, 5611, 9891, 2622, 9818, 312, 6105, 1848, 1339, - 7382, 4214, 8955, 9892, 6474, 3909, 9732, 690, 3371, - 4869, 387, 3460, 9149, 5467, 6478, 5618, 6583, 2381, - 1542, 8342, 3787, 7463, 3823, 6427, 315, 6985, 523, - 5901, 9665, 9643, 5095, 3067, 2951, 9816, 6719, 6640, - 4349, 9622, 9227, 394, 8600, 2210, 9007, 6794, 6193, - 3591, 3763, 8848, 712, 2600, 6953, 345, 8176, 4284, - 2762, 1429, 3052, 9077, 9247, 8084, 9368, 8295, 4882, - 1029, 4128, 2221, 4457, 136, 1060, 5650, 2149, 979, - 879, 252, 4258, 4991, 6954, 9684, 5762, 3304, 4194, - 5738, 4881, 2067, 4630, 3102, 4373, 4364, 3467, 3904, - 2703, 9367, 5744, 1003, 7348, 7382, 2923, 6743, 2716, - 6005, 8885, 8781, 4688, 3208, 8165, 7655, 2105, 9594, - 413, 1022, 9740, 9415, 4440, 7624, 1850, 8157, 8564, - 2131, 4847, 5156, 223, 2070, 8741, 4432, 2158, 1202, - 3074, 6959, 1256, 8497, 7709, 1898, 7099, 7257, 3557, - 1556, 6637, 1724, 6284, 8045, 1698, 1347, 5955, 3324, - 2660, 5510, 9500, 461, 4931, 3894, 7627, 3132, 4579, - 5941, 9535, 3710, 3467, 3922, 66, 8349, 6253, 7489, - 3867, 9837, 4357, 5889, 1429, 2174, 5793, 5103, 9209, - 7044, 5165, 1550, 2922, 7404, 724, 8019, 1723, 9892, - 8178, 326, 8495, 1261, 5156, 3647, 48, 9679, 8317, - 258, 3999, 7172, 9917, 1368, 9356, 8091, 2920, 4194, - 9220, 533, 1856, 4184, 9970, 585, 888, 7980, 502, - 3237, 7414, 5484, 1198, 5107, 6730, 1701, 778, 9576, - 9847, 7139, 5046, 9505, 4273, 3181, 3701, 7837, 6438, - 5446, 5489, 3750, 7620, 3013, 2520, 4866, 2256, 1226, - 4389, 803, 6623, 207, 8091, 9177, 5502, 7927, 5893, - 7286, 4995, 7523, 7718, 638, 1691, 5859, 7604, 7499, - 5376, 8321, 497, 2691, 7462, 1832, 7626, 4908, 8743, - 2814, 8920, 2170, 2237, 5900, 1192, 1906, 5324, 8567, - 433, 7307, 4785, 7781, 8369, 5137, 3139, 6244, 3003, - 5146, 5779, 963, 5834, 8307, 1509, 6851, 2879, 9153, - 5590, 2199, 7088, 4851, 1689, 3978, 7052, 9728, 7823, - 6554, 7099, 3919, 9251, 3170, 4952, 3743, 753, 4966, - 8769, 5930, 1536, 3883, 3398, 3809, 4230, 4021, 4625, - 911, 3578, 6189, 791, 2646, 9155, 6564, 5024, 7172, - 6929, 9364, 1592, 9019, 2813, 8778, 1820, 106, 4324, - 1259, 4349, 3192, 4715, 
9115, 966, 2560, 8066, 1744, - 3149, 6003, 1498, 3413, 7870, 8393, 6128, 5648, 3764, - 4120, 43, 1077, 5168, 6447, 193, 4043, 7134, 3629, - 5140, 5639, 2285, 6521, 4820, 2512, 2929, 3156, 8218, - 1304, 6623, 4398, 4748, 475, 2397, 4221, 1663, 9429, - 2056, 5484, 6397, 578, 8710, 4074, 3345, 5055, 8296, - 3426, 9590, 5354, 2960, 4081, 7610, 9414, 6545, 7162, - 2284, 3632, 5177, 9745, 61, 2148, 4065, 1241, 1790, - 2890, 291, 8273, 8068, 1391, 1258, 2802, 698, 6995, - 5305, 5523, 4749, 4449, 5337, 2346, 555, 1299, 4206, - 1176, 3831, 8077, 1268, 5459, 4259, 5701, 3655, 9166, - 7235, 9918, 5806, 8346, 4150, 3003, 3079, 6459, 1439, - 629, 2696, 6049, 7779, 4964, 9219, 6464, 1735, 1160, - 3784, 6373, 384, 1808, 5241, 8372, 1677, 4788, 4963, - 9409, 1508, 9724, 8760, 7710, 589, 3951, 700, 1849, - 1678, 543, 3229, 9558, 3592, 1739, 3704, 323, 6030, - 5632, 5396, 5373, 2268, 8829, 915, 2928, 6822, 4320, - 9393, 3081, 6996, 900, 1244, 9959, 6457, 3034, 4242, - 8879, 6148, 3658, 2641, 9655, 1273, 2175, 1409, 2889, - 4549, 2132, 2423, 3494, 1302, 6844, 8913, 6052, 6682, - 4676, 5897, 8113, 7646, 9008, 815, 8636, 2168, 8534, - 9398, 7841, 9924, 5887, 3514, 8309, 8574, 9316, 1868, - 8906, 4664, 9990, 98, 7132, 708, 8628, 2818, 642, - 3643, 213, 310, 8853, 5074, 7350, 4260, 4345, 2685, - 6155, 1114, 3294, 3439, 9572, 673, 6218, 2900, 8049, - 3443, 3297, 3657, 8180, 8304, 4095, 9553, 4268, 1084, - 5217, 7168, 3140, 2957, 3846, 8019, 7828, 163, 1006, - 9946, 9671, 9272, 3197, 4715, 2133, 6480, 2603, 3711, - 3059, 7614, 7252, 8503, 5168, 3017, 8178, 6997, 7469, - 6817, 2311, 2400, 2604, 5422, 4020, 1626, 3307, 5947, - 5172, 1638, 3688, 98, 1611, 1810, 3312, 7402, 2064, - 7049, 8774, 3516, 1326, 7757, 8495, 5985, 6522, 6285, - 926, 4463, 4966, 488, 3014, 7145, 2639, 4866, 7635, - 9111, 404, 6974, 425, 470, 1597, 9938, 8633, 1112, - 1609, 7751, 5866, 2397, 2930, 2785, 5455, 1171, 5622, - 9921, 3502, 5025, 968, 6760, 6708, 7717, 9632, 2776, - 8251, 2002, 6046, 5073, 6290, 4377, 8849, 6636, 2619, - 7154, 9845, 1431, 4845, 3786, 826, 5185, 5466, 9894, - 5919, 6738, 412, 7178, 6064, 7916, 8348, 9807, 7127, - 3957, 3075, 9983, 7984, 3352, 4559, 4032, 6062, 2428, - 9519, 4774, 5203, 4905, 2366, 6001, 4187, 2803, 6224, - 8937, 871, 887, 753, 6766, 7729, 6343, 5371, 7478, - 4377, 2616, 562, 5625, 2629, 6739, 1582, 3555, 3157, - 6267, 2853, 1658, 4524, 81, 9456, 3168, 7244, 1072, - 7992, 7229, 3997, 2559, 7595, 3257, 822, 3803, 9044, - 4118, 6382, 357, 4687, 7985, 1064, 7748, 7272, 6982, - 1721, 62, 8299, 458, 3900, 7701, 1054, 3789, 4, - 9018, 4356, 4519, 9343, 3566, 2087, 9695, 1990, 982, - 2457, 7819, 9049, 8393, 7518, 4215, 5670, 386, 7510, - 5190, 4578, 8752, 4373, 4995, 745, 1654, 8288, 9362, - 5849, 7627, 2767, 9267, 1855, 5384, 4327, 4418, 1591, - 1122, 4195, 5132, 5368, 1200, 7062, 7024, 6158, 423, - 5711, 1019, 1328, 9184, 2842, 6447, 7948, 7654, 5641, - 1458, 476, 1313, 332, 9273, 4501, 315, 5260, 7109, - 1604, 5981, 8914, 190, 1062, 5126, 1309, 3331, 553, - 4330, 7219, 4988, 2671, 2168, 5519, 3181, 9637, 5051, - 8302, 2204, 7909, 6777, 1005, 6145, 5110, 6838, 9076, - 177, 899, 6196, 6318, 1764, 1175, 4772, 8191, 8417, - 6357, 9011, 697, 8083, 6982, 8213, 8974, 7616, 72, - 1182, 2829, 5015, 8973, 9205, 383, 6201, 1455, 2354, - 1267, 694, 8960, 9517, 4308, 9225, 5125, 4432, 5696, - 5694, 2632, 2399, 5165, 7619, 3796, 9759, 7244, 4225, - 4053, 3194, 7288, 9459, 2975, 3009, 5576, 120, 1434, - 7291, 8484, 8487, 6972, 1212, 902, 5858, 9194, 297, - 8570, 8447, 7854, 3003, 5252, 4842, 949, 2773, 969, - 3387, 3908, 
1314, 8010, 4241, 2612, 2698, 8269, 7690, - 1980, 2174, 8245, 3101, 7283, 9184, 5599, 5828, 6807, - 4649, 4531, 608, 3401, 7366, 1236, 4816, 4466, 9003, - 6, 5171, 3604, 7726, 5508, 8457, 6946, 6624, 6571, - 2161, 3266, 9412, 6669, 4820, 7494, 5787, 9016, 7480, - 3453]), - values=tensor([8.0507e-01, 2.4061e-01, 9.9295e-01, 5.2270e-01, - 5.0369e-01, 8.0636e-01, 9.2131e-01, 7.3217e-01, - 2.6765e-01, 7.6297e-01, 2.8175e-01, 2.7665e-01, - 9.8805e-01, 6.0214e-01, 7.9375e-01, 1.5782e-01, - 5.4458e-01, 7.3042e-01, 5.4889e-02, 6.5435e-01, - 3.9504e-01, 5.1617e-01, 8.2417e-01, 4.6359e-01, - 6.2808e-01, 1.5615e-01, 4.0632e-01, 8.0385e-01, - 9.7725e-02, 2.7003e-01, 5.7269e-01, 5.9901e-01, - 6.9641e-01, 4.5868e-01, 7.0478e-01, 3.7953e-01, - 9.1271e-01, 8.4774e-01, 5.6314e-01, 1.9135e-01, - 2.8674e-01, 3.9676e-01, 5.6842e-01, 3.2122e-01, - 8.6286e-01, 8.6492e-01, 8.1028e-01, 1.8244e-01, - 7.6320e-01, 5.2109e-01, 2.7986e-01, 2.2582e-01, - 5.4112e-01, 1.3152e-01, 2.0246e-01, 4.2137e-01, - 3.3474e-01, 3.4342e-01, 4.8395e-01, 9.7969e-01, - 9.2855e-01, 5.9261e-01, 8.8771e-01, 2.7136e-01, - 7.7586e-01, 3.2662e-01, 6.6785e-01, 1.9121e-02, - 6.9228e-01, 9.2617e-02, 5.9247e-01, 5.8896e-01, - 9.5905e-01, 7.7813e-01, 6.1692e-01, 9.4429e-01, - 9.5185e-01, 7.6228e-01, 4.2703e-01, 8.2680e-01, - 8.3731e-01, 9.5819e-01, 8.7021e-01, 8.4965e-01, - 2.7776e-01, 5.8516e-01, 6.4275e-01, 7.2807e-02, - 3.9795e-01, 8.5851e-01, 6.9601e-01, 2.9595e-01, - 7.9908e-01, 4.2211e-01, 6.8287e-01, 5.3939e-01, - 8.8201e-01, 1.6659e-01, 6.2928e-01, 3.1327e-01, - 3.2717e-01, 6.9925e-01, 7.5348e-01, 7.2599e-01, - 2.0553e-01, 6.0729e-01, 4.1387e-01, 6.3892e-01, - 7.7973e-01, 1.6592e-01, 8.0874e-01, 1.8499e-01, - 1.8789e-01, 4.4245e-01, 2.2357e-01, 1.7321e-01, - 3.9260e-01, 3.9275e-01, 5.3270e-01, 4.0217e-01, - 7.3609e-01, 5.5575e-01, 7.9280e-01, 8.0882e-01, - 6.4776e-01, 2.8334e-01, 5.5097e-01, 4.2347e-01, - 9.0593e-01, 1.9089e-01, 7.5048e-01, 8.4130e-01, - 2.9380e-01, 9.7529e-01, 4.4187e-01, 8.0464e-01, - 7.6919e-01, 7.7568e-01, 6.2286e-01, 3.6055e-01, - 3.3840e-01, 3.2178e-01, 3.1976e-01, 2.0615e-01, - 4.6471e-02, 5.0030e-01, 1.9682e-01, 4.6424e-01, - 9.0253e-01, 8.1431e-01, 3.2480e-02, 8.0170e-01, - 6.0957e-01, 9.2842e-01, 9.8725e-01, 8.1810e-01, - 4.9758e-01, 1.6485e-01, 7.9130e-01, 5.0798e-01, - 1.1257e-01, 6.0078e-01, 5.6108e-01, 4.9654e-01, - 3.2612e-01, 3.1885e-01, 2.1347e-01, 4.0301e-01, - 1.4096e-01, 4.7382e-01, 8.8193e-01, 6.4107e-01, - 2.5871e-01, 3.3105e-01, 7.1814e-01, 8.1813e-01, - 1.1066e-02, 4.5316e-01, 3.9875e-01, 3.9958e-01, - 7.3179e-01, 1.3981e-01, 6.9424e-01, 7.4864e-01, - 4.2577e-01, 6.5063e-01, 9.0541e-01, 8.9115e-01, - 5.6373e-01, 7.2516e-01, 4.5611e-01, 2.3250e-01, - 8.5110e-01, 7.4140e-01, 2.2912e-01, 3.4030e-02, - 4.3318e-01, 9.4978e-01, 1.7428e-01, 5.3481e-01, - 3.0915e-01, 9.4312e-02, 8.4948e-01, 7.0419e-01, - 7.8533e-01, 6.3407e-01, 2.8080e-01, 7.3828e-01, - 9.8599e-01, 7.5889e-01, 1.3728e-01, 6.5997e-01, - 3.8460e-01, 6.6440e-01, 3.8946e-02, 7.3804e-02, - 2.4318e-01, 3.3013e-01, 2.6887e-01, 1.1820e-01, - 4.2522e-01, 4.3302e-01, 1.7182e-01, 7.1649e-02, - 7.4360e-01, 3.4560e-01, 2.9003e-01, 7.1065e-01, - 1.9589e-01, 9.4763e-02, 7.2924e-01, 4.1721e-01, - 3.2266e-01, 7.4814e-01, 3.5923e-01, 7.4608e-01, - 5.1318e-02, 5.0948e-01, 2.2794e-02, 5.6396e-01, - 4.0443e-02, 4.7217e-01, 5.1177e-01, 1.4188e-02, - 4.4743e-01, 9.7751e-01, 6.6817e-01, 5.8061e-01, - 4.4294e-01, 3.3303e-01, 1.0600e-01, 9.1982e-01, - 9.7845e-01, 2.1391e-01, 9.9607e-01, 6.9474e-01, - 9.9428e-01, 5.9972e-01, 5.7451e-01, 5.7396e-02, - 6.9516e-02, 
8.7242e-01, 9.8407e-01, 9.5583e-01, - 8.4831e-01, 5.5568e-01, 2.9895e-01, 9.3606e-01, - 7.9218e-01, 7.5839e-01, 8.8937e-01, 5.8967e-02, - 6.6676e-01, 4.6444e-01, 7.6581e-01, 2.3600e-02, - 9.5966e-01, 8.2239e-01, 9.5020e-01, 7.1875e-01, - 5.7831e-01, 9.0482e-02, 1.6168e-01, 7.8712e-02, - 9.5825e-01, 7.1869e-01, 1.0140e-01, 2.8523e-01, - 3.6853e-01, 5.3681e-02, 4.3362e-01, 3.1478e-02, - 7.8635e-01, 3.4366e-01, 5.6111e-01, 1.2127e-02, - 9.0278e-01, 1.9809e-01, 6.3472e-01, 9.0084e-01, - 7.5565e-01, 1.3093e-01, 2.3358e-01, 3.1800e-01, - 1.5689e-01, 4.2469e-01, 6.9820e-01, 6.1385e-01, - 2.5466e-01, 9.4154e-01, 1.7091e-01, 6.6611e-01, - 3.0606e-01, 8.5477e-01, 7.8533e-01, 8.7224e-01, - 5.2056e-01, 7.0916e-01, 1.5376e-02, 3.0544e-01, - 5.1884e-01, 9.6093e-01, 1.2889e-01, 8.5539e-01, - 2.5647e-01, 9.1180e-03, 1.9597e-01, 2.7536e-01, - 3.9574e-01, 5.2746e-01, 6.8273e-01, 6.9238e-01, - 6.7659e-01, 8.8601e-01, 3.2503e-01, 4.4480e-01, - 4.6140e-01, 2.2678e-01, 5.8936e-01, 1.9554e-01, - 6.6458e-01, 2.2131e-01, 8.5009e-01, 7.0706e-01, - 9.5349e-01, 6.2824e-01, 4.1599e-01, 8.7893e-01, - 2.3537e-01, 8.4975e-01, 1.7865e-01, 4.7327e-02, - 8.7836e-01, 6.5097e-01, 6.9850e-01, 8.1697e-01, - 4.2112e-01, 6.4393e-01, 9.1991e-01, 4.9738e-01, - 8.2887e-01, 6.7509e-01, 8.4041e-01, 9.1399e-01, - 3.6578e-01, 9.8913e-01, 7.7253e-01, 1.8280e-01, - 2.4871e-01, 7.5650e-01, 1.4646e-01, 3.4292e-02, - 5.8682e-01, 7.4195e-01, 5.3159e-02, 4.3829e-01, - 8.5519e-01, 7.7959e-01, 3.8413e-02, 4.0249e-01, - 3.3982e-01, 4.7195e-01, 9.9454e-01, 3.3522e-01, - 4.9192e-01, 3.8224e-01, 4.4352e-01, 5.7838e-01, - 1.3144e-01, 4.5732e-01, 6.4630e-01, 2.4199e-01, - 1.2734e-01, 9.5766e-02, 6.5757e-01, 5.4258e-01, - 9.3010e-01, 4.0742e-01, 4.2345e-01, 2.7314e-01, - 1.5881e-01, 1.1798e-01, 2.5639e-01, 7.3654e-01, - 8.9433e-01, 8.8763e-02, 6.0792e-01, 7.5790e-01, - 2.2865e-01, 9.9348e-01, 3.9391e-01, 1.3746e-01, - 4.0923e-01, 5.8573e-01, 5.6565e-01, 5.1379e-01, - 2.5013e-01, 9.9438e-01, 5.6604e-01, 5.3212e-01, - 7.4988e-01, 9.6312e-01, 5.5377e-01, 5.4789e-01, - 7.4612e-02, 8.7294e-01, 3.3253e-01, 9.8328e-01, - 2.8072e-01, 6.9491e-01, 4.2727e-01, 1.0826e-01, - 1.6755e-01, 4.3996e-01, 7.0088e-01, 6.5111e-01, - 7.2310e-01, 9.7371e-01, 6.6237e-02, 5.6600e-01, - 6.5732e-01, 1.5901e-01, 3.5927e-01, 5.7040e-01, - 4.5588e-01, 4.0037e-01, 7.3440e-01, 1.7459e-01, - 5.9630e-01, 1.3977e-01, 4.8270e-01, 3.7273e-01, - 7.3967e-01, 5.0054e-01, 2.5218e-01, 3.0617e-01, - 4.3805e-04, 1.0425e-01, 6.3347e-01, 4.6682e-01, - 9.6110e-01, 4.0049e-01, 9.2703e-02, 7.3912e-01, - 4.1610e-02, 2.3594e-01, 1.3021e-01, 3.6550e-01, - 8.2483e-01, 6.8855e-02, 9.7663e-01, 7.0695e-01, - 6.2656e-02, 8.0776e-01, 7.6084e-01, 7.1101e-01, - 3.8279e-01, 4.5469e-01, 6.6115e-01, 4.5057e-01, - 3.6197e-01, 7.4029e-01, 7.6340e-01, 8.3608e-01, - 8.3839e-01, 2.5357e-01, 3.8408e-01, 6.1298e-01, - 9.8468e-01, 7.6932e-01, 6.6239e-01, 8.6013e-01, - 4.1519e-01, 6.5759e-01, 7.1094e-01, 5.1742e-01, - 1.2965e-01, 7.5811e-01, 9.3997e-01, 3.9015e-01, - 7.2502e-01, 9.0838e-01, 5.0903e-01, 8.8963e-01, - 1.3096e-01, 5.6967e-01, 5.2654e-01, 7.5502e-01, - 9.1487e-01, 7.6229e-01, 5.3217e-01, 7.0901e-01, - 4.0276e-01, 7.7266e-01, 1.3731e-01, 8.8716e-01, - 5.0082e-01, 9.0385e-01, 2.1904e-01, 4.9512e-01, - 3.9813e-01, 8.9681e-01, 4.3891e-01, 1.3651e-01, - 9.8979e-01, 1.8445e-01, 3.2477e-01, 4.1938e-01, - 6.1831e-01, 9.9846e-01, 4.4281e-01, 6.7203e-01, - 3.1498e-01, 3.6237e-01, 5.3583e-01, 3.3656e-01, - 7.5325e-01, 3.2010e-01, 8.9637e-01, 4.2261e-02, - 6.2375e-01, 9.5499e-01, 1.7212e-01, 1.4874e-01, - 6.7623e-01, 
8.3182e-01, 7.4532e-01, 4.2210e-02, - 6.5329e-01, 4.4951e-01, 7.5430e-01, 8.7388e-01, - 8.5789e-01, 9.8723e-01, 8.1589e-01, 3.1591e-01, - 6.0895e-01, 8.4026e-01, 1.5956e-01, 4.3307e-01, - 3.3088e-01, 2.4020e-01, 4.6642e-01, 3.6388e-01, - 8.3079e-03, 6.3874e-01, 7.0864e-01, 8.4270e-01, - 3.9820e-01, 7.9893e-01, 8.5444e-01, 2.1196e-01, - 7.4788e-01, 5.8199e-01, 6.9565e-01, 3.0630e-01, - 8.4838e-01, 2.3687e-01, 1.6845e-01, 2.0640e-01, - 9.6962e-01, 3.8025e-01, 9.4056e-01, 5.0775e-02, - 8.4877e-01, 5.0238e-01, 4.0567e-01, 8.8208e-01, - 2.4011e-01, 9.8945e-01, 4.9792e-02, 5.6083e-01, - 8.5086e-01, 9.0628e-01, 8.3393e-01, 2.8443e-01, - 4.2645e-01, 7.5821e-01, 6.3742e-02, 1.5048e-01, - 6.5685e-01, 6.3517e-01, 1.7601e-02, 1.1730e-01, - 3.5213e-01, 7.2766e-01, 5.3210e-01, 2.4482e-01, - 2.3051e-01, 2.8771e-01, 1.2646e-01, 3.8082e-01, - 3.4515e-01, 9.8746e-01, 9.0908e-01, 5.1389e-01, - 5.3672e-02, 7.4069e-01, 8.8725e-01, 1.2742e-01, - 4.6425e-01, 9.4137e-01, 5.6908e-01, 2.1842e-01, - 5.3924e-01, 7.3004e-01, 3.7237e-01, 5.9746e-01, - 3.8151e-01, 8.7017e-01, 2.3522e-01, 8.7846e-01, - 2.9566e-01, 6.3719e-01, 5.0273e-01, 6.6207e-01, - 2.9977e-01, 7.9874e-01, 8.4384e-01, 4.8911e-01, - 8.0898e-01, 3.9092e-01, 7.4185e-01, 9.6535e-01, - 9.0972e-01, 9.3776e-01, 8.3238e-01, 4.3263e-01, - 3.6534e-02, 6.6501e-01, 2.3741e-01, 4.2544e-01, - 5.1300e-01, 8.0559e-01, 4.7343e-01, 6.8441e-01, - 2.2575e-01, 9.5046e-01, 9.6595e-01, 9.0274e-02, - 3.6334e-01, 6.9095e-02, 1.9040e-01, 5.8034e-01, - 4.4344e-01, 6.3314e-01, 4.6341e-01, 1.9620e-01, - 1.2561e-01, 3.3448e-01, 2.3939e-01, 6.5656e-01, - 7.4549e-01, 8.0767e-01, 9.0073e-01, 6.8499e-01, - 2.5279e-01, 6.7235e-02, 9.9432e-01, 7.9345e-01, - 5.2322e-01, 7.4221e-01, 8.7684e-01, 5.9947e-01, - 8.5031e-01, 5.2108e-01, 5.7199e-02, 5.8895e-01, - 1.5455e-01, 5.5722e-01, 1.1740e-01, 1.8787e-01, - 3.4269e-02, 6.1328e-01, 8.9098e-01, 6.1179e-02, - 9.7174e-01, 6.9530e-01, 7.5351e-01, 3.6878e-01, - 6.3225e-01, 3.0838e-02, 9.2489e-01, 4.4399e-01, - 8.3176e-01, 5.1710e-02, 7.8755e-01, 2.7926e-01, - 9.0979e-01, 3.7306e-01, 3.3451e-01, 7.8876e-03, - 5.8840e-01, 9.9100e-01, 5.2276e-01, 3.3959e-01, - 2.0976e-01, 9.0783e-01, 2.8137e-01, 3.3640e-01, - 1.3910e-01, 4.3941e-01, 7.7420e-01, 4.4066e-01, - 5.3733e-01, 6.9166e-01, 7.3402e-01, 9.0601e-01, - 7.2659e-01, 3.5345e-01, 8.3657e-01, 6.3362e-01, - 9.0880e-01, 6.2022e-01, 7.3968e-02, 9.5163e-01, - 8.1590e-01, 3.2542e-01, 4.3341e-01, 8.5695e-01, - 1.6718e-01, 6.1165e-01, 4.7159e-01, 1.6461e-01, - 5.4686e-01, 3.9495e-01, 5.1038e-01, 1.0149e-01, - 7.1867e-01, 1.9060e-01, 8.4690e-01, 2.1212e-01, - 6.2564e-01, 4.6129e-01, 3.7432e-01, 7.4444e-01, - 6.6752e-01, 5.4421e-01, 4.5698e-01, 2.0129e-01, - 5.3565e-01, 6.4916e-01, 3.9503e-01, 9.1870e-01, - 7.0564e-01, 2.7758e-01, 5.0366e-01, 9.3558e-02, - 7.3633e-01, 1.5508e-01, 4.2724e-01, 1.1644e-01, - 2.8316e-01, 6.2577e-02, 7.7771e-01, 7.7460e-01, - 9.0249e-02, 6.4390e-01, 8.3686e-01, 9.2400e-01, - 2.6242e-01, 2.0027e-01, 8.4560e-01, 8.7000e-01, - 7.7375e-01, 5.4250e-01, 3.6026e-01, 5.6023e-01, - 3.3220e-01, 8.4107e-01, 6.3823e-01, 1.8703e-01, - 2.8048e-02, 9.2826e-01, 3.9652e-02, 8.9134e-01, - 6.6399e-01, 7.7509e-01, 8.1472e-01, 4.4265e-01, - 1.9597e-02, 5.7310e-01, 2.0484e-01, 3.6184e-01, - 5.3963e-01, 8.3754e-01, 3.9675e-01, 4.8612e-02, - 6.0476e-01, 1.0854e-01, 6.4168e-01, 8.0755e-01, - 7.9661e-01, 4.5280e-01, 6.7552e-01, 2.6422e-01, - 2.9765e-01, 7.3493e-01, 2.7940e-01, 7.7422e-01, - 2.8204e-04, 4.2108e-01, 5.6586e-01, 1.0556e-01, - 1.1550e-02, 8.2554e-02, 4.3945e-01, 5.7594e-01, - 8.9251e-01, 
9.4039e-01, 5.6989e-02, 5.4813e-01, - 6.4406e-01, 9.2155e-01, 9.4419e-01, 6.6322e-01, - 2.7957e-01, 2.9197e-01, 9.1368e-01, 9.5441e-01, - 9.5904e-01, 4.2642e-01, 7.9625e-01, 1.2600e-01, - 1.3869e-01, 8.3948e-01, 9.7068e-02, 7.5039e-01, - 3.2113e-01, 3.7210e-01, 7.9795e-01, 7.3403e-01, - 4.2191e-01, 2.1404e-01, 1.9561e-01, 2.6717e-01, - 6.3550e-01, 5.2827e-01, 5.6268e-01, 6.6776e-01, - 6.6585e-01, 5.4771e-01, 4.7139e-01, 4.2999e-01, - 2.2744e-01, 7.3449e-01, 9.3571e-01, 2.0460e-01, - 8.7126e-01, 6.3205e-01, 4.5232e-01, 1.0747e-02, - 9.6500e-01, 4.2656e-01, 7.3286e-01, 9.2143e-01, - 4.1197e-01, 8.6333e-01, 9.6936e-01, 8.1432e-02, - 8.2861e-02, 4.3735e-01, 5.8064e-01, 7.6731e-02, - 2.2808e-01, 2.4833e-01, 9.5109e-01, 4.1294e-01, - 2.2988e-01, 6.9450e-01, 6.1942e-01, 8.9872e-01, - 5.5144e-01, 5.4203e-01, 7.6712e-01, 1.0604e-01, - 8.3558e-01, 3.0839e-01, 7.6796e-01, 4.6978e-01, - 3.9200e-01, 9.4286e-01, 5.5714e-02, 2.8062e-01, - 4.1955e-02, 1.2238e-01, 2.7380e-01, 5.4131e-01, - 9.8529e-01, 1.4965e-01, 4.5735e-01, 5.8346e-01, - 7.2817e-01, 2.1697e-01, 7.2339e-01, 2.7133e-01, - 4.2316e-01, 8.8540e-03, 2.8433e-01, 8.2022e-01, - 7.7852e-02, 8.5926e-01, 4.4605e-02, 7.7914e-01, - 8.0113e-01, 1.5392e-01, 9.7433e-01, 5.6771e-01, - 6.7325e-01, 6.3463e-01, 2.1643e-01, 5.3227e-01, - 6.7932e-01, 3.4423e-01, 2.5964e-01, 6.5186e-01, - 9.1365e-01, 1.8955e-01, 7.6133e-01, 2.5264e-01, - 8.3147e-01, 6.8222e-02, 8.8616e-01, 3.2882e-01, - 7.0634e-01, 1.8169e-01, 9.6910e-01, 4.4978e-01, - 4.6309e-01, 8.5679e-01, 7.1098e-01, 7.5744e-01, - 9.2047e-01, 9.5873e-01, 1.5589e-01, 9.3949e-01, - 3.4989e-01, 4.6710e-01, 8.1157e-02, 1.4008e-01, - 8.7401e-01, 9.8365e-01, 3.5178e-01, 5.0703e-01, - 4.3238e-01, 1.7770e-01, 3.0158e-01, 8.0513e-01, - 5.4259e-01, 9.7303e-01, 9.8516e-01, 5.1310e-01, - 8.6513e-01, 8.5088e-01, 6.9005e-01, 3.6431e-01, - 9.7970e-01, 1.9764e-02, 9.5557e-01, 2.1947e-01, - 8.7722e-02, 3.0887e-01, 1.3598e-01, 2.8304e-01, - 3.4945e-01, 5.4883e-01, 1.7336e-01, 6.3137e-01, - 9.7623e-01, 5.7471e-01, 2.7208e-01, 8.4042e-01, - 4.8335e-01, 8.4750e-01, 9.9182e-02, 1.1519e-01, - 4.4113e-01, 8.5510e-01, 5.2443e-01, 4.3708e-01]), + col_indices=tensor([1583, 2010, 5254, 7979, 6044, 1811, 7275, 5124, 1436, + 6977, 5579, 4446, 9531, 9948, 1649, 1369, 7922, 7653, + 2659, 6213, 259, 9933, 1809, 5407, 6617, 1048, 9736, + 8197, 4865, 8298, 9784, 82, 5539, 325, 4902, 1683, + 7666, 621, 9636, 2101, 9761, 740, 4832, 605, 5884, + 3975, 9597, 4053, 6617, 1715, 7682, 8784, 4868, 6631, + 4385, 6313, 6260, 3586, 9177, 2920, 7526, 5398, 7541, + 248, 3734, 7646, 6276, 8109, 2125, 9714, 6281, 1353, + 5963, 2603, 264, 3737, 9675, 9238, 2280, 9506, 9180, + 8024, 6153, 5553, 3522, 6695, 1640, 8954, 8297, 6626, + 843, 9222, 5001, 3481, 6513, 5429, 9771, 5585, 8988, + 5464, 3454, 6624, 6512, 7330, 6444, 6199, 5861, 4510, + 672, 6028, 7721, 5884, 2715, 1700, 4921, 4515, 9810, + 242, 3364, 5002, 1424, 3751, 9511, 8727, 7691, 6098, + 8102, 5389, 3846, 102, 6605, 6800, 5446, 4429, 9979, + 6051, 887, 6347, 5680, 5275, 2705, 9887, 8702, 7450, + 9613, 1435, 9601, 9924, 7389, 9934, 1462, 4302, 946, + 584, 8440, 8214, 4233, 5454, 4216, 6174, 9492, 1467, + 7415, 3149, 3551, 5073, 7523, 8960, 4752, 7424, 4426, + 2828, 4638, 8921, 7283, 9829, 5888, 286, 7407, 6914, + 7286, 465, 3805, 1983, 2183, 7470, 8061, 9434, 9295, + 8270, 5079, 1562, 9550, 5203, 9993, 4115, 392, 600, + 8605, 4591, 7089, 1143, 4551, 580, 3847, 8259, 1796, + 2339, 8992, 9416, 7284, 9062, 8162, 205, 4452, 8143, + 669, 7123, 4261, 3096, 5227, 4884, 7762, 4252, 52, + 3294, 1781, 9880, 
1607, 4053, 2502, 5482, 6906, 2035, + 3232, 7130, 8453, 2375, 5456, 7258, 5074, 1652, 861, + 2762, 5646, 7995, 1392, 97, 3679, 7567, 6422, 8241, + 7836, 1183, 6034, 142, 9235, 4509, 6760, 4561, 9070, + 2031, 9065, 6375, 4040, 9160, 4623, 5781, 9376, 5259, + 8317, 6466, 1257, 2274, 1048, 3553, 9336, 8350, 2160, + 4368, 2869, 233, 8694, 1453, 4399, 705, 4421, 79, + 2161, 6601, 6000, 6535, 869, 6199, 4248, 4356, 4542, + 4670, 9521, 6660, 9199, 5986, 7398, 9596, 5813, 6088, + 7831, 753, 5374, 9257, 2827, 3291, 6819, 4035, 7713, + 4013, 7218, 2279, 2871, 1326, 1811, 2703, 1574, 5900, + 1155, 1352, 9694, 8175, 2074, 3391, 9502, 4148, 8642, + 2600, 2104, 2058, 3036, 4818, 8770, 2956, 8526, 9775, + 225, 4118, 7042, 6449, 5064, 4207, 1756, 8969, 2588, + 1096, 1153, 7117, 4392, 9511, 4215, 2271, 4504, 5309, + 2046, 8527, 4173, 3449, 7352, 1698, 7327, 3267, 1105, + 6661, 3644, 2703, 5379, 701, 6814, 1440, 6092, 1140, + 2239, 412, 4967, 1213, 7822, 3780, 5299, 7074, 5185, + 2666, 9098, 4903, 8017, 846, 4723, 3624, 9337, 5755, + 3369, 3913, 8577, 7137, 8463, 7938, 5325, 4521, 5290, + 6218, 1511, 1273, 4386, 8704, 1619, 195, 3249, 4064, + 773, 7448, 8201, 3121, 301, 4372, 1634, 7663, 2937, + 4689, 3712, 2187, 7632, 4046, 6463, 6448, 2875, 2814, + 9857, 2905, 9752, 7090, 3694, 7956, 4390, 5280, 7018, + 7056, 2989, 6712, 9231, 5247, 2513, 738, 431, 1726, + 2666, 6815, 9509, 646, 9632, 2730, 9313, 4866, 8557, + 1201, 4486, 5168, 9202, 7420, 4285, 5922, 3677, 2027, + 3975, 6254, 498, 5210, 204, 3290, 3533, 9875, 2505, + 6720, 3641, 9914, 1958, 3109, 7127, 2799, 7390, 179, + 7325, 9320, 2800, 2350, 7555, 8269, 7272, 8574, 8049, + 6676, 3970, 285, 8780, 1108, 1111, 9658, 6160, 6280, + 9914, 5935, 5918, 9764, 548, 2210, 2619, 1696, 4207, + 1063, 9914, 4231, 1029, 9243, 3982, 7733, 8072, 6321, + 6035, 5297, 7545, 9721, 4364, 9434, 6360, 7476, 2533, + 7275, 8433, 3934, 6724, 1068, 4014, 8204, 9682, 9063, + 1493, 2469, 2999, 3214, 9658, 2217, 9114, 3111, 8598, + 4210, 3973, 1496, 4445, 7146, 1795, 4717, 7849, 7512, + 1191, 1515, 3311, 6622, 939, 4014, 9088, 9339, 7424, + 7262, 6339, 7032, 9476, 7001, 4460, 2692, 4399, 7886, + 7047, 7669, 9359, 864, 5049, 7040, 4096, 4298, 9623, + 4192, 5872, 9144, 7397, 6121, 280, 9465, 5892, 966, + 5574, 5723, 2834, 9950, 3081, 5143, 5667, 8402, 2784, + 5766, 6218, 8186, 8713, 1780, 6821, 8450, 7019, 6189, + 8952, 9208, 1484, 7762, 3939, 7602, 4668, 3210, 4330, + 5716, 7241, 405, 335, 3412, 8477, 9234, 3584, 5207, + 6179, 7712, 5919, 8037, 4123, 7316, 7474, 2679, 8523, + 3986, 1694, 5901, 9022, 9607, 9829, 9846, 6902, 4585, + 9022, 2079, 9614, 8663, 5819, 1097, 3118, 5088, 8749, + 4025, 4321, 5154, 6517, 880, 4285, 4711, 2326, 4162, + 4920, 8690, 2593, 488, 4694, 1259, 587, 7957, 4395, + 8376, 7054, 2133, 3697, 5563, 9686, 918, 2978, 8239, + 6797, 7376, 9376, 1457, 3934, 2176, 9476, 3211, 3186, + 7396, 5390, 2250, 176, 8978, 9325, 2806, 6907, 8181, + 4346, 5066, 99, 1478, 770, 7246, 8629, 9961, 3268, + 1790, 5846, 8686, 1633, 492, 1478, 6673, 1726, 8741, + 7768, 7527, 1121, 1733, 1273, 9650, 7653, 6076, 194, + 8769, 8199, 4604, 7252, 9298, 9566, 567, 6639, 4728, + 9542, 8173, 2780, 2516, 4192, 9184, 6994, 6018, 9797, + 590, 9338, 8251, 3777, 8162, 13, 8005, 188, 2602, + 9738, 4060, 9649, 9986, 8500, 4315, 5221, 4141, 4921, + 6691, 8158, 9615, 4239, 6096, 9428, 555, 6830, 9510, + 9880, 2716, 808, 4588, 557, 5422, 4795, 165, 4359, + 1300, 4655, 4817, 1957, 4576, 6824, 6342, 1040, 8141, + 4172, 8691, 48, 277, 8256, 9583, 4203, 1779, 5112, + 3616, 9838, 1376, 7998, 2797, 6753, 8079, 
8276, 1987, + 19, 4354, 9286, 2615, 6740, 9487, 3627, 5231, 5312, + 390, 7185, 7779, 5971, 3543, 6278, 8106, 9509, 758, + 4532, 5244, 8649, 3081, 5562, 7090, 2064, 5447, 6233, + 2085, 2494, 1501, 8471, 526, 9417, 5830, 1825, 355, + 1862, 4324, 2868, 9563, 2266, 3962, 389, 6200, 6403, + 663, 5817, 7056, 8981, 4928, 9712, 1095, 3893, 9576, + 2748, 7578, 3941, 2877, 4899, 2380, 6793, 4371, 8114, + 4922, 6923, 2351, 8578, 3122, 5644, 6934, 4317, 6902, + 6045, 9795, 1482, 6773, 9968, 5852, 9269, 9520, 4391, + 2401, 9617, 2668, 7256, 4772, 1903, 3373, 4316, 1692, + 5034, 8118, 7083, 2393, 9148, 5891, 1781, 9855, 721, + 9905, 3195, 2413, 4717, 5913, 670, 1557, 3791, 7460, + 3616, 5334, 4133, 1405, 3039, 7529, 4796, 4803, 2152, + 5840, 2128, 9084, 8408, 3604, 6581, 5669, 6068, 7009, + 3479, 7465, 7540, 8897, 4413, 3315, 6549, 1677, 8701, + 2788, 2678, 3442, 3734, 1118, 4316, 9381, 9006, 4108, + 5912, 826, 6724, 7846, 460, 6304, 2843, 4879, 6744, + 6163, 4479, 222, 1812, 3520, 9850, 935, 9282, 4410, + 4179, 6820, 3994, 5429, 7733, 2102, 2864, 556, 4530, + 6540, 2911, 8663, 2630, 3513, 9220, 6390, 5701, 7953, + 7398, 6782, 5778, 7647, 1313, 5401, 5563, 9720, 5486, + 7709]), + values=tensor([0.6630, 0.9503, 0.0770, 0.9226, 0.1778, 0.8279, 0.0536, + 0.5325, 0.8085, 0.7558, 0.2260, 0.6523, 0.1197, 0.0283, + 0.8139, 0.4165, 0.5589, 0.9775, 0.8148, 0.2087, 0.7767, + 0.9982, 0.1484, 0.2622, 0.3615, 0.4355, 0.0185, 0.0082, + 0.8837, 0.4983, 0.6604, 0.5671, 0.1006, 0.6141, 0.5716, + 0.6764, 0.1529, 0.3165, 0.4084, 0.1229, 0.1096, 0.8308, + 0.6585, 0.1950, 0.5085, 0.3697, 0.4971, 0.2796, 0.2744, + 0.7057, 0.5504, 0.4942, 0.5020, 0.9849, 0.7865, 0.4277, + 0.7720, 0.9203, 0.4083, 0.5834, 0.2993, 0.4296, 0.1742, + 0.7625, 0.5725, 0.0721, 0.6285, 0.8961, 0.7799, 0.9867, + 0.4023, 0.0461, 0.2516, 0.8722, 0.3041, 0.0596, 0.3822, + 0.7562, 0.7884, 0.2916, 0.8113, 0.7857, 0.6486, 0.1687, + 0.2654, 0.6660, 0.5205, 0.5807, 0.1636, 0.7524, 0.7303, + 0.3845, 0.9418, 0.8240, 0.9533, 0.9406, 0.6844, 0.7416, + 0.8718, 0.0156, 0.8172, 0.4208, 0.9722, 0.5292, 0.3721, + 0.6191, 0.9317, 0.6666, 0.7072, 0.3855, 0.2294, 0.8589, + 0.8908, 0.4834, 0.3423, 0.7789, 0.1241, 0.4564, 0.1890, + 0.2563, 0.9107, 0.6748, 0.1334, 0.8151, 0.7204, 0.8972, + 0.1073, 0.8907, 0.0744, 0.5882, 0.8889, 0.6790, 0.8885, + 0.6658, 0.2075, 0.9175, 0.9938, 0.4163, 0.0696, 0.8712, + 0.9168, 0.3235, 0.4716, 0.9854, 0.8143, 0.7879, 0.4607, + 0.4850, 0.6066, 0.6913, 0.7193, 0.2839, 0.8347, 0.2095, + 0.8353, 0.4546, 0.8880, 0.7210, 0.8453, 0.8916, 0.7714, + 0.9868, 0.0515, 0.5239, 0.5681, 0.1820, 0.5272, 0.5691, + 0.4175, 0.5907, 0.4424, 0.0668, 0.0128, 0.9282, 0.7583, + 0.0221, 0.5736, 0.2285, 0.6693, 0.2730, 0.2836, 0.9157, + 0.5434, 0.9795, 0.1769, 0.6107, 0.7322, 0.8412, 0.2757, + 0.3098, 0.3578, 0.3820, 0.2329, 0.4037, 0.0151, 0.1326, + 0.7184, 0.6578, 0.8297, 0.6581, 0.3428, 0.9501, 0.6670, + 0.8806, 0.8754, 0.7223, 0.7669, 0.3788, 0.8755, 0.2260, + 0.4669, 0.9844, 0.0082, 0.2669, 0.6026, 0.2689, 0.0429, + 0.7489, 0.3726, 0.0159, 0.3927, 0.8172, 0.3282, 0.0575, + 0.9974, 0.1350, 0.5672, 0.4689, 0.7961, 0.5402, 0.4963, + 0.2623, 0.5421, 0.8354, 0.9905, 0.6240, 0.8519, 0.1978, + 0.6499, 0.5759, 0.2503, 0.1102, 0.8574, 0.7008, 0.3233, + 0.4296, 0.2435, 0.6516, 0.7119, 0.5430, 0.4755, 0.2341, + 0.5926, 0.0770, 0.8351, 0.6430, 0.9481, 0.7289, 0.4034, + 0.1701, 0.7125, 0.8934, 0.3081, 0.6354, 0.7915, 0.1404, + 0.9513, 0.8453, 0.0850, 0.0642, 0.8424, 0.2306, 0.9822, + 0.5022, 0.4984, 0.4800, 0.2218, 0.9723, 0.7497, 0.9084, + 0.4784, 0.7804, 0.1238, 0.0880, 
0.9711, 0.5284, 0.2542, + 0.4913, 0.0373, 0.9479, 0.7298, 0.5961, 0.2747, 0.8281, + 0.3376, 0.6719, 0.5355, 0.2467, 0.1814, 0.6034, 0.3903, + 0.8079, 0.6634, 0.1089, 0.4461, 0.0248, 0.7098, 0.8937, + 0.6958, 0.2081, 0.8237, 0.3634, 0.3255, 0.3011, 0.8487, + 0.1664, 0.0299, 0.4550, 0.8746, 0.8226, 0.5176, 0.2105, + 0.8160, 0.9892, 0.5189, 0.1660, 0.0722, 0.9487, 0.8604, + 0.8181, 0.8144, 0.0561, 0.2179, 0.2462, 0.1360, 0.7312, + 0.5931, 0.6399, 0.4488, 0.9270, 0.1570, 0.5666, 0.0921, + 0.9507, 0.7374, 0.9303, 0.9871, 0.0275, 0.0396, 0.7157, + 0.7314, 0.4516, 0.5880, 0.5629, 0.7503, 0.5107, 0.0122, + 0.0074, 0.8876, 0.3382, 0.7626, 0.8644, 0.8030, 0.7340, + 0.8979, 0.6798, 0.6990, 0.8131, 0.0632, 0.8141, 0.9434, + 0.5737, 0.8869, 0.6157, 0.9858, 0.1492, 0.1949, 0.5858, + 0.8066, 0.7969, 0.1033, 0.7643, 0.6380, 0.5493, 0.8582, + 0.1931, 0.2076, 0.1054, 0.0371, 0.2378, 0.8212, 0.4451, + 0.9355, 0.1414, 0.2556, 0.4392, 0.9159, 0.8704, 0.5855, + 0.5298, 0.0073, 0.3946, 0.2827, 0.6593, 0.2170, 0.1559, + 0.0455, 0.0804, 0.3042, 0.1902, 0.1499, 0.4571, 0.2421, + 0.2416, 0.9852, 0.9579, 0.6952, 0.6187, 0.8878, 0.9344, + 0.1083, 0.7885, 0.6837, 0.3299, 0.6348, 0.5837, 0.6662, + 0.7313, 0.3365, 0.6725, 0.8139, 0.5019, 0.3039, 0.0443, + 0.3934, 0.1305, 0.6780, 0.3155, 0.0219, 0.7662, 0.6555, + 0.1320, 0.2374, 0.6933, 0.5809, 0.5466, 0.6377, 0.3020, + 0.1965, 0.5168, 0.4464, 0.2304, 0.0432, 0.9550, 0.9132, + 0.3859, 0.2031, 0.3693, 0.8159, 0.9889, 0.9524, 0.3432, + 0.9314, 0.8808, 0.9456, 0.6866, 0.3442, 0.3810, 0.2142, + 0.1117, 0.1744, 0.1056, 0.5794, 0.4455, 0.7572, 0.7078, + 0.3687, 0.4666, 0.7407, 0.7029, 0.4882, 0.4880, 0.1980, + 0.7023, 0.6681, 0.3423, 0.8422, 0.4851, 0.3016, 0.7606, + 0.3490, 0.7330, 0.1476, 0.1495, 0.7345, 0.5988, 0.7147, + 0.9822, 0.6029, 0.8218, 0.8268, 0.6557, 0.2332, 0.7667, + 0.0044, 0.4372, 0.0163, 0.9516, 0.7012, 0.6660, 0.1956, + 0.0351, 0.8338, 0.8275, 0.6101, 0.8723, 0.8245, 0.0823, + 0.2151, 0.2357, 0.3441, 0.6542, 0.6748, 0.6120, 0.6830, + 0.9101, 0.3363, 0.6175, 0.2109, 0.7948, 0.1254, 0.3320, + 0.4047, 0.8548, 0.2724, 0.0547, 0.0778, 0.5829, 0.7401, + 0.7010, 0.5091, 0.4745, 0.7438, 0.0620, 0.6122, 0.3501, + 0.7476, 0.3391, 0.5091, 0.9092, 0.8341, 0.1950, 0.5236, + 0.7056, 0.1342, 0.3505, 0.7257, 0.4253, 0.6642, 0.1578, + 0.1206, 0.9648, 0.8145, 0.6922, 0.2614, 0.5366, 0.4982, + 0.1154, 0.6473, 0.8548, 0.3066, 0.1033, 0.1884, 0.1407, + 0.6633, 0.9702, 0.8613, 0.8238, 0.6722, 0.6006, 0.6815, + 0.1240, 0.9769, 0.1594, 0.1592, 0.7939, 0.7089, 0.7574, + 0.7724, 0.7991, 0.0521, 0.1390, 0.5925, 0.9554, 0.5527, + 0.3929, 0.3007, 0.1885, 0.5802, 0.9953, 0.8505, 0.7792, + 0.7177, 0.7694, 0.9788, 0.1512, 0.0267, 0.7016, 0.1441, + 0.2745, 0.3479, 0.2525, 0.2337, 0.5448, 0.7971, 0.7101, + 0.4871, 0.7275, 0.0547, 0.1591, 0.6937, 0.7809, 0.7092, + 0.5201, 0.8475, 0.9010, 0.4869, 0.9890, 0.2146, 0.1450, + 0.9848, 0.0270, 0.0118, 0.0203, 0.7924, 0.2769, 0.6145, + 0.6071, 0.6017, 0.9106, 0.8197, 0.5817, 0.4479, 0.4907, + 0.2678, 0.4969, 0.5055, 0.5426, 0.5791, 0.8993, 0.7838, + 0.7690, 0.7852, 0.3688, 0.8160, 0.2079, 0.3793, 0.0739, + 0.5666, 0.4022, 0.9864, 0.3167, 0.6305, 0.3557, 0.3387, + 0.0828, 0.3478, 0.2152, 0.7658, 0.2613, 0.2753, 0.3668, + 0.0262, 0.0671, 0.9363, 0.6576, 0.6551, 0.4846, 0.8404, + 0.2088, 0.7480, 0.7314, 0.0171, 0.1082, 0.8451, 0.8946, + 0.0525, 0.5191, 0.9442, 0.8675, 0.0810, 0.1833, 0.6438, + 0.4552, 0.3045, 0.4082, 0.8689, 0.6683, 0.1434, 0.4421, + 0.8382, 0.0615, 0.9346, 0.9201, 0.3514, 0.2008, 0.9937, + 0.6121, 0.0783, 0.0490, 0.9787, 0.7461, 0.6146, 
0.8319, + 0.8274, 0.2768, 0.5834, 0.3309, 0.3199, 0.2056, 0.8142, + 0.8941, 0.8025, 0.5653, 0.7564, 0.5742, 0.2219, 0.3180, + 0.5125, 0.8124, 0.7601, 0.2726, 0.1730, 0.3593, 0.3373, + 0.4692, 0.9228, 0.9147, 0.2409, 0.6852, 0.8612, 0.6749, + 0.1014, 0.9540, 0.1357, 0.7184, 0.6814, 0.5570, 0.7995, + 0.7039, 0.6331, 0.7100, 0.7276, 0.8540, 0.6804, 0.1364, + 0.3489, 0.7310, 0.1063, 0.2338, 0.8823, 0.4673, 0.2555, + 0.6167, 0.7623, 0.3031, 0.8515, 0.4643, 0.9082, 0.2171, + 0.0778, 0.6489, 0.8915, 0.5582, 0.7660, 0.2890, 0.4197, + 0.5005, 0.0508, 0.7572, 0.3797, 0.1183, 0.9101, 0.8882, + 0.5287, 0.0878, 0.2431, 0.2083, 0.8905, 0.2357, 0.5134, + 0.9562, 0.2161, 0.9351, 0.4696, 0.5955, 0.3703, 0.1321, + 0.5021, 0.2072, 0.4348, 0.7568, 0.6269, 0.1888, 0.6508, + 0.7362, 0.7266, 0.2942, 0.5439, 0.3775, 0.0531, 0.4822, + 0.1171, 0.7194, 0.5961, 0.0788, 0.7347, 0.2944, 0.5051, + 0.4234, 0.1811, 0.7851, 0.4131, 0.9675, 0.0834, 0.4549, + 0.3090, 0.4735, 0.2600, 0.9700, 0.4791, 0.4860, 0.9793, + 0.1782, 0.1251, 0.0561, 0.1267, 0.4222, 0.2756, 0.5840, + 0.9312, 0.7700, 0.0651, 0.8242, 0.0918, 0.6772, 0.2307, + 0.6185, 0.2052, 0.0318, 0.2899, 0.0400, 0.8978, 0.7673, + 0.1941, 0.1098, 0.6272, 0.1911, 0.5689, 0.7419, 0.2110, + 0.5718, 0.0873, 0.7370, 0.5046, 0.2710, 0.0251, 0.9665, + 0.4711, 0.3015, 0.6301, 0.4981, 0.9277, 0.2392, 0.1183, + 0.4019, 0.2169, 0.9822, 0.6515, 0.1332, 0.4805, 0.5861, + 0.5485, 0.5998, 0.1326, 0.7170, 0.7154, 0.4803, 0.1293, + 0.9035, 0.3781, 0.2599, 0.8153, 0.9982, 0.5549, 0.7963, + 0.6153, 0.5282, 0.8208, 0.7625, 0.3001, 0.8081, 0.0388, + 0.7114, 0.7782, 0.5048, 0.3137, 0.5784, 0.9730, 0.4431, + 0.4536, 0.3100, 0.6520, 0.2606, 0.8882, 0.1650, 0.9834, + 0.5674, 0.4688, 0.3389, 0.4291, 0.4446, 0.5716, 0.8014, + 0.3987, 0.6453, 0.1096, 0.5652, 0.9221, 0.8967, 0.0894, + 0.2612, 0.3509, 0.6985, 0.7305, 0.1395, 0.1928, 0.1405, + 0.9620, 0.6290, 0.2470, 0.3081, 0.4298, 0.1092, 0.7591, + 0.5871, 0.5192, 0.1299, 0.2518, 0.0236, 0.9061, 0.5083, + 0.1286, 0.8821, 0.0292, 0.9269, 0.0310, 0.7251, 0.3297, + 0.9879, 0.2589, 0.3124, 0.1632, 0.7470, 0.5454, 0.8446, + 0.7783, 0.6341, 0.9530, 0.0359, 0.0134, 0.0892, 0.9552, + 0.4086, 0.1451, 0.7356, 0.2482, 0.4823, 0.7400, 0.5225, + 0.5147, 0.8299, 0.9223, 0.5481, 0.2230, 0.4704, 0.7526, + 0.9367, 0.9192, 0.2550, 0.0140, 0.7589, 0.7582, 0.2471, + 0.6001, 0.8208, 0.0590, 0.9928, 0.6332, 0.0738]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4435, 0.0773, 0.0249, ..., 0.0070, 0.7023, 0.2387]) +tensor([0.8647, 0.0597, 0.7485, ..., 0.2087, 0.0137, 0.4402]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -375,757 +268,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.08438587188720703 seconds +Time: 0.043045759201049805 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '124428', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.5914108753204346} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([3597, 9, 1044, 3716, 98, 3551, 3965, 3920, 2369, - 6622, 2889, 2733, 9619, 8879, 214, 1498, 5628, 5050, - 1612, 1432, 5924, 4223, 5673, 5024, 1571, 9554, 4117, - 6172, 4152, 1650, 6284, 8764, 3734, 5467, 6144, 9907, - 2415, 89, 4907, 3890, 4658, 8223, 3917, 3024, 6323, - 7419, 1781, 9076, 2972, 6710, 7400, 4605, 3762, 446, - 1387, 7068, 5435, 7232, 4568, 2187, 5029, 6733, 5022, - 6175, 6496, 4875, 4881, 4574, 9860, 7187, 9416, 1923, - 1194, 94, 6450, 120, 3556, 662, 3588, 5897, 9345, - 8674, 1514, 9592, 2980, 1401, 6049, 8787, 9171, 3495, - 9181, 919, 8930, 6135, 9408, 4922, 56, 574, 8860, - 478, 6298, 1874, 6479, 9220, 412, 8498, 4958, 3548, - 7785, 9175, 8108, 7647, 1805, 8157, 6171, 3362, 8230, - 6430, 7487, 1385, 3551, 2958, 7149, 4586, 8471, 1688, - 6329, 9764, 2504, 67, 2541, 5515, 177, 7617, 6563, - 2343, 6042, 7519, 7459, 2876, 5950, 2565, 239, 6045, - 8489, 8095, 4822, 3292, 2517, 6278, 8498, 1068, 8545, - 7307, 2055, 7660, 1787, 7644, 9721, 5299, 5866, 4300, - 2700, 1812, 3905, 8346, 3677, 5526, 136, 1020, 9070, - 3626, 7531, 24, 5730, 8750, 3419, 3958, 9660, 4864, - 397, 130, 2100, 9017, 8130, 9268, 9436, 7403, 5935, - 7294, 345, 5063, 7364, 6606, 2227, 9600, 9404, 2745, - 3479, 4174, 5328, 9829, 7151, 4314, 3696, 156, 6804, - 2390, 5226, 9767, 2814, 8379, 2259, 2590, 426, 1832, - 3602, 4756, 5684, 6464, 7783, 6365, 5698, 205, 73, - 2041, 63, 4203, 3158, 9783, 9611, 9510, 8078, 9809, - 7685, 5812, 4837, 8951, 9749, 248, 1383, 3588, 6139, - 2203, 451, 9006, 4176, 1050, 9797, 3518, 4341, 8268, - 4409, 6991, 6112, 2820, 7242, 7623, 1267, 1524, 2041, - 2802, 5217, 9728, 44, 647, 7587, 3741, 3734, 8625, - 9596, 3412, 9996, 5623, 3514, 6277, 5990, 9502, 9986, - 1404, 3495, 5226, 4443, 2844, 8625, 9754, 5816, 8532, - 1573, 8875, 8640, 5046, 3006, 6151, 9096, 9834, 8302, - 2981, 8009, 7004, 5002, 6563, 6778, 9143, 2298, 1341, - 9647, 4732, 1138, 318, 9778, 7333, 5010, 5308, 3661, - 4133, 4815, 8155, 350, 7096, 1463, 4030, 1425, 9734, - 7661, 857, 760, 5251, 5833, 9830, 9985, 5355, 3652, - 6244, 5516, 8779, 4897, 4324, 2984, 1720, 1214, 7697, - 3785, 8304, 4535, 9883, 9435, 9314, 4084, 980, 9586, - 7724, 8505, 9547, 8438, 2144, 2950, 3387, 1670, 310, - 1188, 8554, 4192, 8989, 1781, 8278, 4086, 264, 812, - 6229, 520, 1252, 4641, 8595, 3629, 3740, 3731, 4393, - 2538, 2138, 8564, 2632, 5000, 7375, 5627, 3510, 7874, - 1248, 1528, 6220, 1509, 9534, 4166, 3445, 949, 7555, - 9571, 7792, 1450, 4446, 3421, 5658, 8042, 9341, 4384, - 3786, 8082, 8416, 9541, 6129, 4084, 2581, 2590, 2166, - 2734, 785, 3174, 3234, 7590, 8715, 5041, 2090, 1009, - 7570, 6940, 9938, 3881, 1559, 1728, 5192, 6666, 2828, - 6810, 5582, 9131, 7619, 2389, 8415, 7441, 7444, 8840, - 9486, 9964, 2335, 6406, 6029, 1591, 202, 4193, 9819, - 9108, 840, 4133, 4334, 1950, 2165, 3516, 7793, 7110, - 5794, 8938, 7729, 3005, 7056, 7902, 4734, 5746, 3618, - 3551, 9225, 7799, 2978, 8642, 2603, 7336, 9375, 6727, - 4244, 6397, 8093, 3019, 6461, 1127, 4888, 4658, 2342, - 6588, 1361, 4072, 3523, 948, 4115, 8891, 7872, 8944, - 4098, 2776, 2517, 30, 759, 2713, 2765, 6101, 5678, - 6772, 3564, 9046, 1092, 553, 2540, 5146, 9176, 483, - 8793, 9620, 8646, 4982, 7934, 6502, 2497, 8717, 2110, - 39, 5784, 7817, 1830, 4480, 8719, 4702, 2821, 3855, - 627, 4048, 3201, 2076, 2655, 6182, 7135, 5225, 6795, - 2968, 3677, 1343, 6746, 8967, 5893, 2945, 1225, 4284, - 2175, 2404, 4007, 8929, 8043, 4910, 541, 3674, 8453, - 7919, 
7114, 2082, 1249, 6704, 2817, 9550, 2450, 6154, - 4525, 173, 1891, 9794, 2003, 5909, 7708, 9703, 6803, - 6608, 9659, 6830, 9910, 4547, 7543, 3448, 195, 5601, - 3121, 1646, 624, 3739, 398, 3705, 6950, 4116, 2201, - 5815, 6622, 973, 6145, 9795, 8857, 8299, 7557, 7897, - 269, 7287, 4444, 7013, 9909, 1335, 5727, 542, 4598, - 7852, 92, 1610, 7627, 2156, 1900, 1688, 9186, 2572, - 224, 4786, 1211, 9995, 8276, 7178, 4362, 7972, 5004, - 8663, 1616, 9367, 9658, 4882, 8865, 1318, 187, 1685, - 5877, 8201, 9574, 32, 9588, 1800, 5659, 3491, 9467, - 6580, 8555, 3008, 1230, 5267, 501, 8991, 3154, 5367, - 2656, 8315, 1100, 3575, 6054, 2409, 2474, 1471, 3191, - 4445, 2112, 2894, 7911, 9524, 2624, 3539, 1257, 2492, - 4262, 6815, 1964, 8177, 169, 8119, 2882, 7843, 5953, - 878, 4802, 8642, 3349, 512, 4155, 9735, 3121, 950, - 6932, 5780, 9856, 1931, 1400, 1850, 2076, 9623, 7351, - 3509, 8123, 5943, 1447, 5721, 2683, 5005, 9711, 101, - 9367, 3394, 4719, 1979, 806, 9869, 768, 4364, 3396, - 8403, 9431, 4256, 9289, 76, 3077, 2878, 1712, 124, - 3730, 1906, 4466, 1650, 1301, 5779, 3996, 4687, 3816, - 512, 2262, 7799, 7891, 9050, 9500, 8470, 2869, 309, - 6882, 5126, 4765, 5030, 4479, 4382, 3906, 1799, 3780, - 4247, 5148, 5890, 7235, 7577, 3024, 2128, 674, 2569, - 8863, 8456, 5859, 9797, 248, 1593, 8305, 9315, 6565, - 9591, 8104, 4420, 461, 2813, 3880, 8370, 4001, 8798, - 9058, 1852, 146, 4964, 6879, 6088, 6473, 1174, 6756, - 8001, 6389, 1270, 6286, 9008, 6718, 8595, 3618, 4704, - 5906, 5310, 6765, 7872, 6545, 129, 6983, 2763, 2554, - 5333, 9755, 4687, 6079, 7357, 1012, 9375, 9223, 9523, - 8760, 5406, 5084, 2758, 6749, 4516, 5933, 8632, 8854, - 7523, 5233, 4293, 1627, 9769, 8134, 4773, 7537, 1219, - 5139, 3109, 5280, 7985, 7093, 8136, 9461, 8072, 2136, - 3202, 2675, 2129, 7596, 4277, 4132, 1480, 1216, 6052, - 3521, 1108, 1843, 114, 1849, 5800, 4653, 5041, 1182, - 4172, 5133, 477, 3651, 4005, 9269, 4638, 9006, 4857, - 3715, 6710, 1859, 1041, 6628, 1679, 5036, 3050, 2764, - 6929, 6851, 2533, 1289, 4800, 3879, 9174, 3515, 1678, - 9059, 6857, 5848, 8264, 8744, 6738, 2415, 4244, 9067, - 2862, 1613, 7663, 6779, 1381, 2392, 8685, 9289, 4184, - 2280, 1504, 3287, 7020, 8497, 3887, 7931, 469, 7800, - 7962, 566, 1330, 2209, 4356, 7380, 721, 7112, 2010, - 5226, 1707, 1220, 7140, 7438, 8069, 2556, 1860, 6656, - 6829, 7669, 8826, 9004, 9296, 4221, 739, 6315, 5684, - 5128, 2962, 588, 8230, 5387, 5795, 1909, 670, 7962, - 943, 428, 8211, 8950, 3960, 5148, 5930, 64, 618, - 2515, 8705, 3343, 9480, 8205, 5847, 706, 586, 3126, - 9408, 623, 538, 2577, 9282, 7387, 2181, 2354, 7225, - 4875, 885, 2566, 9382, 2630, 5819, 7718, 8692, 9847, - 4617, 6677, 6092, 9020, 7856, 3877, 4863, 9880, 5807, - 1413]), - values=tensor([5.8898e-01, 1.9785e-01, 6.3586e-01, 8.6646e-02, - 8.4703e-01, 8.9949e-01, 4.7578e-01, 3.2786e-01, - 8.4856e-01, 1.3220e-01, 4.9751e-01, 5.1156e-01, - 3.4802e-01, 7.9588e-02, 1.8863e-01, 5.3428e-01, - 2.8282e-01, 7.9120e-01, 8.7336e-01, 8.7475e-01, - 1.2938e-01, 1.9185e-01, 5.2270e-01, 9.1682e-01, - 1.6392e-01, 3.6995e-01, 8.4556e-01, 1.3267e-02, - 5.3756e-01, 6.3600e-01, 4.7649e-01, 5.4493e-01, - 2.7282e-01, 9.1126e-01, 6.9921e-01, 4.0012e-01, - 4.1687e-02, 5.5766e-02, 9.1626e-02, 7.0106e-02, - 9.3599e-01, 3.9164e-01, 5.7651e-01, 1.8004e-01, - 2.2728e-01, 5.6995e-01, 5.5047e-01, 1.3688e-02, - 4.8513e-01, 4.4803e-01, 2.2857e-01, 2.5550e-01, - 3.8222e-02, 9.3874e-01, 3.3957e-01, 1.7750e-01, - 2.5047e-01, 3.6607e-01, 4.7563e-01, 2.7928e-01, - 3.8084e-01, 6.4707e-01, 4.7085e-01, 8.0893e-01, - 2.2743e-01, 5.2161e-01, 8.4676e-01, 
7.7562e-01, - 4.6980e-01, 9.0971e-01, 8.5451e-01, 2.4410e-01, - 7.0536e-02, 9.4950e-01, 1.6054e-02, 7.4894e-01, - 5.7160e-01, 5.1769e-01, 9.8477e-01, 9.9731e-01, - 8.9610e-01, 4.4542e-01, 2.1957e-01, 6.2373e-01, - 7.9382e-01, 8.2937e-01, 6.0337e-01, 6.7083e-01, - 1.0547e-01, 2.3128e-01, 2.7439e-01, 7.7884e-01, - 9.8521e-01, 5.4541e-01, 9.6292e-01, 3.9355e-01, - 3.6515e-01, 6.5120e-01, 1.2405e-02, 1.8165e-01, - 9.8960e-01, 7.9527e-02, 1.0601e-01, 8.2101e-01, - 5.8771e-02, 2.8802e-01, 5.3565e-01, 1.7563e-01, - 2.3199e-01, 2.0571e-01, 6.4893e-01, 7.2199e-01, - 3.3317e-01, 7.1268e-01, 1.9781e-01, 2.5079e-01, - 9.8791e-01, 1.3405e-01, 9.4955e-01, 2.9983e-01, - 5.6594e-01, 5.4895e-01, 3.2330e-01, 8.5597e-01, - 5.0450e-01, 8.6406e-01, 7.3031e-02, 3.9238e-01, - 1.0755e-02, 2.7806e-01, 4.6386e-01, 9.8927e-01, - 9.2034e-01, 8.8258e-01, 7.8995e-01, 3.4733e-01, - 4.2334e-01, 7.7828e-01, 1.9427e-01, 6.9287e-01, - 1.7783e-01, 1.4555e-02, 9.3092e-01, 8.2048e-01, - 9.4066e-01, 6.9144e-01, 5.4502e-01, 1.7581e-01, - 7.6536e-01, 6.5442e-01, 5.0250e-01, 9.1316e-01, - 8.8898e-01, 2.9152e-01, 3.2250e-01, 1.7168e-01, - 3.3001e-01, 1.3168e-01, 2.5197e-01, 3.1443e-01, - 8.6298e-01, 1.7979e-01, 1.1145e-01, 1.8429e-01, - 9.1810e-01, 5.3228e-01, 8.2721e-01, 4.0671e-01, - 7.2914e-01, 9.0041e-01, 1.7628e-01, 3.9028e-01, - 5.1899e-01, 8.4116e-01, 2.0125e-01, 9.5293e-01, - 5.7544e-02, 6.8202e-01, 5.4243e-01, 4.4692e-01, - 7.8965e-01, 7.6190e-01, 9.4762e-01, 8.9122e-01, - 2.4965e-02, 8.0804e-01, 7.2676e-01, 2.8219e-01, - 3.8119e-01, 3.8278e-01, 2.1577e-01, 1.9611e-01, - 5.3028e-01, 8.0132e-01, 5.1113e-01, 3.5340e-01, - 8.4920e-01, 5.9905e-01, 5.5000e-01, 9.1064e-01, - 9.9349e-01, 9.5796e-01, 9.3070e-02, 4.4622e-01, - 3.7794e-02, 6.1314e-02, 5.8795e-01, 9.6009e-01, - 1.7126e-01, 1.9649e-01, 4.7107e-01, 9.3471e-02, - 4.8184e-01, 7.2025e-01, 4.4528e-01, 5.0593e-01, - 2.6237e-01, 6.2130e-02, 1.2607e-01, 5.3003e-01, - 9.1400e-01, 3.7506e-01, 3.3344e-01, 5.6316e-01, - 2.7731e-01, 4.8451e-01, 5.4412e-02, 7.0750e-01, - 4.0044e-01, 4.0744e-01, 8.0663e-01, 3.8408e-01, - 8.8743e-01, 3.2130e-01, 4.1476e-01, 6.5939e-01, - 3.2461e-01, 7.3738e-01, 2.6924e-01, 7.9785e-01, - 7.6952e-01, 7.5999e-01, 3.2869e-01, 8.3331e-02, - 8.6669e-01, 6.0814e-01, 1.0331e-01, 1.4571e-01, - 3.6014e-01, 7.6453e-01, 6.4567e-01, 6.1038e-01, - 7.8420e-02, 3.1086e-01, 1.8874e-02, 7.1507e-01, - 3.2168e-01, 6.1324e-01, 2.1042e-01, 9.3044e-01, - 3.7680e-01, 7.3589e-01, 9.4311e-01, 5.2798e-01, - 3.4727e-01, 2.8965e-01, 1.0831e-01, 6.3955e-01, - 3.4589e-01, 7.8614e-01, 7.5346e-01, 8.9337e-01, - 2.9736e-01, 9.0881e-03, 2.3892e-01, 1.7932e-01, - 3.1706e-01, 4.3833e-01, 2.6880e-01, 7.0034e-01, - 7.8809e-01, 5.3610e-01, 5.8740e-01, 6.2145e-01, - 3.5527e-01, 7.5413e-01, 4.7065e-01, 7.6124e-01, - 5.8535e-02, 9.9575e-01, 3.0117e-01, 7.5173e-01, - 7.7395e-01, 4.7960e-01, 5.7820e-01, 5.6275e-02, - 7.3307e-01, 2.1509e-01, 8.2765e-01, 9.6562e-01, - 1.4450e-01, 7.7884e-01, 4.7076e-02, 9.3995e-01, - 9.7692e-01, 5.5090e-01, 5.9183e-01, 9.8172e-01, - 2.0021e-01, 7.0048e-01, 5.6574e-01, 6.0901e-01, - 6.6525e-01, 3.4233e-01, 8.5120e-01, 9.5768e-01, - 5.9485e-01, 2.8487e-01, 8.4151e-01, 5.7464e-01, - 3.7557e-01, 7.9613e-01, 2.2357e-01, 4.1104e-01, - 4.5075e-01, 8.2669e-01, 2.0418e-02, 5.2171e-01, - 2.5026e-01, 4.9965e-01, 9.7348e-01, 7.6496e-01, - 8.4108e-01, 2.1203e-01, 3.4009e-01, 3.5832e-01, - 9.5036e-01, 2.7537e-01, 8.6298e-01, 9.6349e-02, - 5.3931e-01, 4.6027e-02, 7.8789e-02, 2.4638e-01, - 3.4143e-01, 4.0269e-02, 1.4661e-01, 1.5786e-01, - 7.2679e-02, 7.9762e-02, 5.5604e-01, 
1.5873e-01, - 6.1787e-01, 7.5778e-01, 3.5443e-01, 4.7033e-01, - 9.8668e-01, 3.8234e-01, 8.5690e-01, 6.4333e-01, - 7.9683e-01, 3.7979e-01, 1.4283e-01, 8.7626e-01, - 5.5154e-01, 6.5234e-01, 3.8837e-01, 9.5886e-01, - 1.4921e-01, 2.7649e-01, 9.9725e-01, 8.4708e-01, - 1.5102e-02, 4.1971e-01, 1.8317e-01, 4.7173e-01, - 9.4276e-01, 2.3664e-01, 5.4025e-01, 4.7318e-03, - 2.3773e-01, 5.3755e-01, 5.7743e-01, 8.2008e-01, - 4.9066e-01, 1.4077e-01, 2.8123e-01, 7.0610e-01, - 6.5906e-01, 1.4561e-01, 8.8094e-01, 8.3870e-01, - 8.2497e-01, 9.7921e-01, 4.2754e-01, 4.5122e-01, - 1.6323e-02, 8.9996e-02, 7.7245e-01, 2.4139e-01, - 7.7527e-01, 7.3230e-01, 3.2673e-01, 8.6534e-01, - 7.4316e-01, 5.1711e-01, 9.4095e-01, 8.5322e-01, - 5.1902e-01, 8.0029e-01, 3.2807e-01, 2.3632e-01, - 8.5332e-01, 4.7775e-01, 4.3627e-01, 2.1487e-01, - 8.4741e-01, 2.4588e-01, 7.9839e-01, 6.1017e-01, - 4.6890e-01, 6.7926e-01, 9.3577e-02, 4.7045e-01, - 9.7929e-01, 8.7900e-01, 5.0589e-01, 2.8158e-01, - 2.8676e-01, 2.5849e-01, 9.1505e-01, 6.8113e-01, - 9.8369e-01, 2.9625e-01, 9.7512e-01, 1.9835e-01, - 4.5588e-01, 5.2834e-01, 6.8604e-01, 3.9369e-03, - 2.1379e-02, 9.9466e-01, 7.8743e-01, 4.1121e-01, - 7.5760e-01, 2.5516e-01, 6.2149e-01, 4.8317e-01, - 6.7851e-01, 5.7955e-01, 9.3347e-01, 1.5762e-02, - 7.2533e-01, 4.8608e-01, 3.1121e-01, 6.4352e-01, - 4.1943e-02, 1.1054e-01, 4.8507e-01, 8.1158e-01, - 5.3857e-01, 2.2003e-01, 2.0791e-01, 3.7889e-01, - 1.7014e-01, 1.5822e-01, 1.2659e-01, 1.4356e-01, - 1.6536e-01, 1.4837e-01, 8.7364e-01, 3.4934e-01, - 3.8285e-01, 3.1356e-01, 6.4007e-01, 7.8815e-02, - 1.8235e-02, 3.5947e-01, 9.8116e-01, 3.3127e-01, - 9.1514e-01, 9.3039e-01, 7.9649e-01, 6.4455e-01, - 2.0122e-01, 2.5687e-01, 3.0865e-02, 1.8534e-01, - 4.1554e-01, 4.7758e-01, 8.1393e-01, 8.3072e-01, - 5.3264e-02, 3.7799e-02, 9.4645e-01, 9.1066e-01, - 8.4615e-01, 1.5824e-01, 6.9896e-01, 5.4031e-01, - 3.6360e-01, 8.8505e-01, 8.5896e-01, 2.3785e-01, - 1.2940e-02, 4.2546e-01, 7.1123e-01, 4.3094e-01, - 6.3707e-01, 4.3457e-04, 1.8388e-01, 3.8438e-01, - 1.7941e-01, 4.0588e-01, 9.3242e-01, 5.7778e-01, - 7.4646e-02, 1.5664e-01, 9.3614e-01, 7.3980e-01, - 3.2986e-01, 8.0129e-01, 8.7607e-01, 8.8861e-01, - 2.3201e-01, 7.5519e-01, 7.0829e-01, 9.1082e-01, - 6.3738e-01, 1.0170e-01, 2.5377e-02, 2.2690e-01, - 4.8110e-02, 6.5433e-01, 9.9909e-01, 4.7314e-01, - 7.9913e-01, 5.8682e-01, 5.0473e-01, 9.3889e-01, - 6.1872e-01, 2.6769e-01, 3.7750e-01, 8.3591e-01, - 3.3321e-01, 2.8068e-01, 9.2491e-01, 6.9194e-01, - 2.9549e-02, 1.9014e-01, 8.7788e-02, 6.7485e-01, - 5.9256e-01, 5.5193e-01, 1.8998e-01, 4.2886e-01, - 6.2134e-01, 6.9408e-01, 8.8914e-01, 8.0789e-01, - 3.6943e-01, 1.9904e-01, 8.9485e-03, 7.6193e-01, - 7.7062e-01, 9.5182e-01, 3.1749e-01, 5.7876e-01, - 3.0682e-02, 9.8304e-01, 8.0679e-01, 9.0657e-01, - 4.9901e-01, 6.3178e-01, 2.1080e-01, 6.6723e-01, - 5.7130e-01, 6.1873e-01, 8.6701e-01, 5.0743e-01, - 1.3738e-01, 8.4637e-01, 5.4882e-01, 9.6798e-01, - 1.4240e-01, 2.2805e-01, 6.2367e-01, 3.7799e-01, - 2.0326e-01, 1.3702e-01, 3.0310e-01, 4.8136e-01, - 3.1046e-02, 9.3411e-01, 6.8956e-02, 9.1324e-01, - 9.2449e-01, 3.9031e-01, 6.0527e-01, 6.2505e-01, - 7.0131e-03, 3.2616e-01, 5.9364e-01, 3.0465e-02, - 9.1118e-01, 9.9648e-01, 4.9248e-01, 9.7122e-02, - 5.8074e-01, 1.1979e-03, 7.3673e-01, 1.5177e-01, - 6.9761e-01, 9.1324e-01, 9.4354e-01, 4.9393e-01, - 8.3755e-01, 1.3216e-01, 9.2559e-01, 2.6868e-01, - 2.2019e-01, 9.8861e-01, 9.2722e-01, 9.5771e-01, - 6.3732e-01, 8.9835e-01, 8.9185e-01, 7.3086e-01, - 8.3756e-01, 8.6144e-01, 4.6742e-02, 3.8567e-01, - 9.5237e-01, 8.8451e-01, 7.0945e-01, 
5.2850e-01, - 4.9557e-01, 7.8318e-01, 7.5254e-02, 6.7505e-01, - 9.5086e-01, 5.4112e-01, 7.4362e-01, 2.9076e-01, - 8.1730e-01, 3.8360e-01, 5.4883e-01, 2.2861e-01, - 9.4098e-03, 2.5906e-02, 5.6712e-01, 1.4304e-01, - 5.7518e-01, 2.4600e-01, 2.0790e-02, 6.4737e-01, - 9.8812e-01, 6.3177e-01, 3.3835e-02, 9.1150e-01, - 1.3355e-01, 5.6474e-01, 5.7587e-02, 6.2624e-01, - 7.8943e-01, 4.2296e-01, 8.5349e-01, 9.9756e-01, - 9.3998e-01, 8.0688e-01, 7.3633e-01, 6.4328e-01, - 4.4941e-01, 3.7755e-01, 7.0109e-01, 2.7185e-02, - 5.2233e-01, 4.2148e-01, 7.9177e-01, 9.8611e-01, - 3.4676e-01, 7.5373e-01, 3.7620e-01, 9.3792e-01, - 3.7963e-01, 1.5518e-01, 7.3247e-01, 2.7909e-01, - 3.5447e-01, 6.1500e-02, 5.4908e-02, 6.3810e-01, - 7.7339e-01, 2.6705e-02, 4.5089e-01, 4.5241e-01, - 5.5235e-01, 1.5614e-01, 7.7894e-02, 9.5874e-01, - 3.7168e-02, 2.7312e-01, 4.3120e-01, 3.2874e-01, - 5.7142e-01, 3.6130e-01, 9.2906e-01, 9.1572e-01, - 8.0265e-01, 7.5849e-01, 5.3804e-02, 6.7831e-01, - 2.4853e-01, 3.7848e-01, 1.8418e-02, 4.5608e-01, - 5.2397e-01, 7.4467e-01, 9.2880e-02, 4.6452e-01, - 5.8540e-01, 6.5798e-01, 8.8912e-01, 3.9890e-02, - 5.2457e-01, 7.8873e-01, 3.1919e-01, 1.1331e-02, - 5.8267e-01, 9.0865e-01, 4.0789e-01, 4.3879e-01, - 5.4865e-01, 4.5825e-02, 2.8572e-02, 1.1844e-01, - 4.4665e-01, 7.2445e-01, 8.9276e-01, 7.1536e-01, - 4.4658e-01, 3.0506e-01, 1.0345e-01, 5.2382e-01, - 9.8328e-01, 2.9920e-01, 1.7323e-01, 9.0400e-01, - 6.0347e-01, 4.7720e-01, 3.3228e-01, 6.1681e-02, - 5.2140e-01, 1.9829e-01, 6.5558e-02, 8.5760e-01, - 1.4001e-01, 7.0359e-01, 2.1593e-01, 2.4194e-01, - 7.0925e-01, 7.5696e-01, 9.2386e-01, 6.0428e-01, - 6.2685e-01, 4.9731e-01, 8.4687e-01, 7.6241e-01, - 1.5124e-01, 4.3670e-01, 1.8251e-02, 2.4860e-01, - 4.2732e-01, 1.4839e-01, 4.0940e-01, 1.2281e-01, - 9.9468e-01, 2.3994e-01, 1.9607e-01, 2.2855e-01, - 7.4350e-01, 3.6673e-01, 5.7596e-01, 8.4352e-02, - 7.4710e-01, 4.8573e-01, 3.2807e-01, 5.4515e-01, - 7.7087e-02, 1.8623e-01, 6.8462e-01, 2.4702e-01, - 7.9728e-01, 2.9985e-01, 9.8415e-01, 9.4390e-01, - 9.6890e-01, 6.6238e-01, 9.4039e-01, 6.3058e-01, - 3.6916e-01, 7.6364e-01, 8.7170e-01, 8.0689e-01, - 2.7583e-01, 5.8783e-01, 8.5485e-01, 9.1759e-02, - 1.5796e-01, 7.8565e-01, 6.0729e-02, 4.6819e-01, - 9.4606e-02, 8.3125e-01, 9.1526e-01, 4.5103e-03, - 9.8069e-01, 2.5511e-01, 8.0716e-02, 4.3126e-01, - 3.4246e-02, 8.2760e-01, 3.0626e-01, 8.9436e-01, - 4.0895e-03, 6.6548e-01, 2.0671e-04, 6.0869e-01, - 5.0895e-01, 4.8369e-02, 1.6133e-01, 4.3844e-01, - 9.0563e-01, 5.6368e-01, 1.0912e-01, 5.9556e-01, - 2.2326e-01, 1.6742e-01, 7.5251e-01, 7.0751e-01, - 3.5590e-01, 6.2298e-01, 1.5239e-01, 9.7441e-01, - 8.9988e-01, 6.2619e-01, 8.5924e-01, 3.2392e-01, - 2.8971e-01, 6.8691e-01, 9.4770e-01, 3.3294e-01, - 6.4690e-01, 7.2508e-01, 6.3154e-01, 8.0059e-02, - 4.8946e-01, 7.8938e-01, 5.4911e-01, 4.3188e-01, - 6.8351e-01, 6.2535e-01, 9.4981e-01, 3.9289e-01, - 4.0848e-01, 1.7000e-01, 7.2092e-01, 1.2379e-01, - 3.1251e-01, 1.2660e-01, 9.4156e-01, 4.5765e-01, - 9.3671e-01, 6.6153e-01, 6.1442e-01, 5.7858e-01, - 1.9139e-01, 1.0934e-01, 7.4005e-01, 8.0790e-02, - 9.0105e-01, 6.4955e-01, 9.7739e-01, 7.9616e-01, - 1.3297e-01, 2.0742e-01, 9.6556e-01, 1.3455e-01, - 2.5186e-01, 2.1543e-01, 3.4826e-02, 7.6440e-01, - 2.2376e-01, 8.6586e-01, 4.7182e-01, 4.3325e-01, - 4.1675e-01, 8.3446e-01, 4.9581e-02, 6.7913e-01, - 3.0389e-02, 2.4170e-01, 8.3960e-01, 5.1508e-01, - 3.4965e-01, 9.7804e-01, 7.1034e-01, 1.7936e-02, - 5.5724e-01, 4.0039e-01, 9.6068e-01, 1.8722e-01, - 8.1980e-01, 5.2903e-01, 4.0793e-01, 2.2700e-01, - 7.0366e-01, 4.0431e-01, 6.8702e-02, 
6.1410e-02, - 7.9224e-01, 6.0851e-02, 6.2886e-01, 2.3274e-01, - 1.4516e-01, 7.4570e-01, 6.6696e-01, 8.0239e-01, - 5.5099e-02, 2.6725e-01, 9.9516e-01, 1.6306e-01, - 2.6052e-01, 1.8739e-01, 5.1894e-01, 6.9062e-01, - 7.1895e-02, 7.6126e-01, 5.9960e-01, 1.0987e-01, - 6.1792e-01, 2.0756e-01, 4.6885e-01, 4.6274e-01, - 8.8747e-01, 9.5345e-01, 7.0894e-01, 5.9417e-01, - 3.9523e-02, 5.7206e-01, 3.2277e-01, 3.5319e-01, - 5.4237e-01, 9.8440e-01, 3.3902e-01, 8.1761e-01, - 9.4886e-02, 1.4636e-01, 7.9422e-02, 6.0671e-01, - 6.8205e-01, 1.0147e-01, 7.4110e-01, 4.9735e-01, - 7.2855e-01, 6.1982e-01, 5.0316e-02, 9.4204e-01, - 4.7305e-01, 8.0307e-02, 7.5121e-01, 9.2374e-02, - 3.4992e-01, 6.9429e-01, 1.6789e-01, 3.6168e-01, - 7.3613e-01, 2.2608e-01, 8.5376e-01, 6.5522e-01, - 3.6983e-01, 3.2533e-01, 7.0235e-01, 2.8870e-01, - 1.8154e-01, 4.7093e-02, 3.8686e-03, 3.4319e-01, - 7.2570e-01, 2.8863e-01, 9.0271e-01, 8.9351e-01, - 6.9524e-01, 2.5214e-01, 9.5820e-01, 3.7436e-01, - 4.2317e-01, 1.4961e-01, 4.3533e-01, 9.4417e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.2136, 0.3814, 0.1034, ..., 0.1098, 0.3191, 0.2700]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 3.5914108753204346 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '363782', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.585279941558838} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '243926', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.084842920303345} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7943, 4488, 6154, 9391, 155, 1146, 4006, 8134, 8781, - 7101, 3276, 7191, 9320, 2859, 4578, 2750, 6596, 4201, - 2541, 6640, 9961, 1286, 5879, 9740, 3536, 2151, 9582, - 2021, 7827, 8693, 2313, 555, 3377, 7363, 334, 2888, - 9782, 3162, 5677, 4519, 3889, 4828, 247, 2616, 279, - 8565, 2538, 9525, 8485, 2616, 1166, 2089, 7055, 6468, - 9499, 1310, 5525, 2540, 8419, 935, 4661, 2785, 1947, - 1602, 2918, 4726, 3718, 3716, 5417, 2404, 2572, 1793, - 4269, 7015, 419, 4336, 5223, 1709, 8875, 645, 5198, - 3752, 5677, 5777, 9470, 6191, 7729, 3008, 6984, 7165, - 5063, 8482, 7789, 9298, 6624, 3445, 4654, 5489, 7051, - 2026, 5766, 3319, 8576, 4863, 735, 6400, 8243, 4596, - 9136, 5453, 8094, 6731, 4592, 6080, 2446, 2152, 9189, - 7168, 5575, 8736, 8708, 188, 2747, 5830, 9269, 8804, - 3159, 3201, 4041, 923, 2727, 2290, 84, 4623, 7603, - 3330, 9268, 1450, 9643, 7491, 1862, 6217, 7622, 6831, - 6030, 2019, 4765, 7685, 3438, 4110, 698, 7237, 1712, - 233, 3041, 9992, 7124, 3242, 6158, 3269, 3219, 7971, - 3346, 2455, 5227, 7583, 1444, 6727, 5034, 2207, 2810, - 6860, 3956, 406, 9630, 4569, 2333, 3303, 790, 9166, - 3001, 9674, 4130, 4005, 9208, 9872, 4115, 1140, 4295, - 1232, 2077, 9451, 8659, 2998, 5331, 5112, 5332, 473, - 2236, 9565, 7574, 3329, 2231, 9436, 1794, 1973, 926, - 9017, 624, 3226, 1901, 9136, 3272, 7061, 4208, 8821, - 7803, 6260, 839, 8051, 1195, 3556, 5978, 2134, 4703, - 5878, 6569, 631, 929, 936, 8619, 2484, 8239, 5928, - 7092, 2988, 7292, 9843, 5609, 7908, 9735, 1868, 8029, - 6880, 8241, 655, 7771, 7115, 4230, 2177, 5561, 519, - 9981, 4448, 1648, 5631, 1203, 3106, 9586, 8781, 334, - 6071, 676, 3, 2908, 206, 7480, 7816, 8182, 3140, - 5100, 6638, 2660, 7295, 3954, 4994, 5905, 5343, 875, - 1395, 1415, 8075, 1833, 8285, 3241, 5726, 82, 7838, - 5658, 9442, 7175, 6495, 1263, 5251, 545, 2259, 5163, - 5212, 9905, 1946, 7828, 3306, 3170, 1189, 5060, 6858, - 3265, 9161, 6049, 6284, 2600, 5696, 1666, 8160, 6654, - 4, 195, 1359, 4960, 8521, 7181, 8865, 7801, 6138, - 5232, 8342, 5270, 6210, 3360, 8441, 1762, 5683, 8982, - 5151, 6181, 2277, 3989, 2354, 7040, 690, 3818, 4654, - 8998, 6989, 7424, 1876, 2235, 6291, 65, 3846, 5433, - 653, 7328, 3161, 7270, 6440, 3854, 9530, 6576, 4282, - 3869, 6253, 5868, 5179, 2108, 4988, 6966, 2124, 1163, - 395, 7769, 5649, 9625, 8844, 7379, 7826, 6877, 1758, - 1205, 3960, 4313, 5313, 7450, 1611, 5494, 6504, 1330, - 5009, 8603, 3204, 8610, 7225, 9811, 5033, 8244, 8927, - 189, 6660, 2530, 5087, 7128, 633, 3653, 9666, 5288, - 7838, 705, 8757, 5051, 3379, 8676, 8102, 6270, 3427, - 6941, 8350, 2853, 3036, 8139, 4125, 9896, 6792, 341, - 7714, 2950, 6057, 9295, 6404, 6582, 9441, 7456, 9260, - 3870, 7429, 9215, 1715, 4911, 3116, 9023, 6965, 8146, - 8742, 1823, 1052, 3129, 7548, 9310, 5087, 6452, 8523, - 4919, 3317, 6285, 4958, 687, 1721, 2711, 1456, 1993, - 7026, 6960, 2734, 8274, 8550, 6512, 5209, 3870, 1135, - 3771, 8488, 5407, 5874, 5804, 1777, 3653, 2911, 182, - 3402, 9157, 8305, 1357, 7109, 8892, 7404, 6887, 1640, - 7537, 1505, 6301, 101, 7157, 9753, 9025, 8408, 1812, - 7187, 7261, 160, 6522, 7459, 3342, 9803, 9988, 543, - 3555, 999, 2319, 3754, 8264, 3371, 3736, 5469, 9452, - 7184, 2203, 820, 9411, 4278, 4780, 9218, 1947, 3157, - 2226, 4652, 8669, 5426, 7683, 880, 435, 2033, 9292, - 2455, 4770, 2217, 699, 7572, 3934, 5467, 6026, 7062, - 2922, 2864, 4813, 8049, 9857, 6142, 778, 3565, 3686, - 9116, 2737, 9009, 4447, 2474, 971, 265, 2434, 9165, - 1954, 
5853, 4055, 4843, 127, 3125, 509, 2432, 6008, - 5150, 4816, 6842, 9257, 576, 5248, 3779, 6206, 9344, - 9008, 721, 1354, 447, 2489, 5014, 7789, 6532, 232, - 5436, 9948, 4343, 3831, 8334, 5048, 6427, 6482, 9779, - 6169, 9153, 4649, 9096, 6309, 9662, 269, 4722, 5201, - 401, 5744, 3228, 8446, 3133, 3731, 7468, 4532, 1166, - 5876, 2482, 2630, 5613, 1887, 4482, 2576, 2771, 7606, - 4597, 89, 5559, 9787, 1481, 5290, 3655, 322, 166, - 8009, 1493, 7027, 2850, 2006, 4514, 2168, 2174, 9048, - 1786, 4139, 406, 6480, 917, 9722, 7578, 9741, 4917, - 2992, 1489, 1747, 4978, 7936, 2582, 5507, 1532, 5524, - 7826, 3449, 10, 9159, 7860, 3540, 198, 862, 5753, - 3470, 8359, 7581, 3144, 3079, 6434, 7746, 2607, 9147, - 7397, 201, 8081, 4846, 4284, 699, 9972, 9537, 8613, - 9875, 2782, 4871, 244, 9191, 1131, 2811, 1247, 3144, - 8178, 3501, 9452, 4821, 5343, 3, 5004, 5736, 6936, - 7862, 3290, 3383, 5283, 3840, 9625, 5012, 4121, 8759, - 8619, 3041, 5033, 1753, 6166, 6909, 1428, 1761, 1897, - 5070, 6012, 1241, 5625, 8016, 7656, 2936, 7895, 6288, - 9171, 2385, 2390, 2228, 8525, 8334, 9316, 4166, 6851, - 6776, 8997, 5468, 5982, 840, 3981, 4943, 5782, 8333, - 8121, 5911, 8729, 9039, 537, 2752, 6485, 3047, 283, - 5307, 7386, 1100, 7487, 5842, 6557, 5254, 4333, 415, - 9591, 1641, 2548, 8771, 2243, 6011, 7901, 3548, 8089, - 8958, 3344, 8858, 702, 9886, 5874, 7708, 7200, 4618, - 4778, 5665, 4614, 8010, 7456, 3047, 9039, 2044, 9926, - 4002, 1862, 2650, 9698, 4716, 2451, 6247, 8897, 4989, - 8623, 9464, 5599, 7310, 740, 1061, 6357, 3766, 5965, - 4638, 6114, 5529, 2164, 1494, 5729, 4627, 3348, 8109, - 5015, 7896, 3749, 213, 7180, 2156, 7644, 3374, 8920, - 2280, 4385, 5660, 4940, 7376, 6062, 7689, 6701, 3982, - 6731, 4335, 5363, 6281, 7290, 491, 3312, 4385, 9124, - 2888, 314, 2496, 8388, 360, 7969, 9150, 4336, 3714, - 7446, 7899, 6546, 956, 3288, 4036, 5808, 9141, 1394, - 7843, 7906, 1832, 9783, 5676, 1090, 40, 4659, 3502, - 84, 6161, 2144, 5947, 2349, 4766, 6358, 2005, 8455, - 7173, 7646, 4290, 8339, 7908, 943, 4131, 6672, 9870, - 4086, 6590, 205, 6057, 8733, 8600, 2315, 9854, 949, - 6942, 3555, 4782, 9913, 2335, 3741, 105, 9676, 829, - 4513, 5377, 7160, 7815, 5725, 4449, 7984, 7022, 9075, - 2973, 7234, 3301, 4106, 1057, 2796, 5400, 9826, 9608, - 1399, 3273, 7886, 5410, 8781, 1618, 8921, 750, 6541, - 313, 693, 2706, 9062, 82, 1001, 7170, 1044, 8082, - 5581, 7140, 949, 2283, 1698, 7268, 3598, 2618, 7101, - 917, 6363, 1607, 9830, 8474, 3927, 7297, 5514, 3427, - 8343, 6039, 3891, 1347, 5609, 7, 7597, 7807, 2355, - 5270, 382, 5935, 7213, 1197, 9647, 3059, 1828, 1291, - 5826, 9873, 5640, 2784, 8784, 1051, 644, 8752, 810, - 4403, 4357, 4219, 1880, 6353, 2009, 6795, 232, 4527, - 4898]), - values=tensor([1.4008e-01, 7.3938e-01, 7.6639e-01, 5.0164e-01, - 1.7486e-01, 3.2598e-01, 5.6159e-01, 5.5178e-01, - 5.7399e-01, 3.9297e-01, 4.2201e-01, 2.1666e-01, - 8.8345e-04, 9.6154e-01, 8.3073e-01, 6.5757e-01, - 1.6074e-01, 7.8577e-01, 9.7772e-01, 1.0075e-01, - 2.5921e-01, 7.4860e-01, 1.8867e-01, 8.9320e-01, - 8.3174e-01, 6.0226e-01, 1.7683e-01, 3.6553e-01, - 5.7124e-01, 2.0441e-01, 6.4951e-01, 7.0821e-01, - 3.7566e-01, 8.9297e-01, 9.1514e-01, 6.5969e-01, - 2.0172e-01, 6.7599e-01, 2.1470e-01, 9.4784e-01, - 1.3850e-01, 4.2041e-01, 8.9476e-01, 5.7393e-01, - 8.5383e-01, 2.3294e-01, 9.3112e-01, 1.4895e-02, - 7.9940e-01, 1.4394e-01, 7.5610e-01, 7.5678e-01, - 7.1119e-01, 7.0345e-02, 9.2463e-01, 5.5449e-01, - 2.2250e-01, 7.0462e-02, 1.3814e-01, 7.0456e-01, - 6.8619e-01, 5.0257e-01, 1.3518e-01, 8.3724e-01, - 3.2626e-01, 4.5308e-01, 6.4764e-01, 5.6043e-01, - 
1.1045e-01, 7.2989e-01, 3.4674e-01, 6.6224e-02, - 3.6046e-01, 1.9957e-01, 8.2177e-01, 5.4828e-02, - 5.7991e-02, 6.8134e-01, 1.6562e-01, 7.6522e-02, - 2.3683e-01, 7.7761e-01, 7.1370e-01, 4.4629e-01, - 8.3816e-01, 5.1936e-01, 5.6090e-01, 4.0387e-01, - 6.4428e-01, 7.7701e-01, 6.7169e-01, 2.6102e-01, - 7.5529e-03, 5.9419e-01, 7.6740e-01, 3.7976e-01, - 4.2995e-01, 9.5223e-01, 3.4165e-01, 4.6248e-01, - 2.2243e-01, 4.5279e-01, 4.3565e-01, 6.6677e-01, - 6.2478e-01, 4.4391e-01, 7.1044e-01, 5.8594e-01, - 1.8528e-01, 3.9540e-01, 7.3149e-01, 8.0810e-01, - 4.1755e-01, 3.1698e-01, 1.5593e-01, 4.5307e-02, - 1.7898e-01, 1.2264e-01, 5.7366e-01, 8.9976e-02, - 9.5322e-01, 6.5297e-01, 1.6100e-01, 3.2706e-01, - 1.5637e-01, 2.2980e-01, 7.5639e-01, 3.9695e-01, - 4.5194e-01, 8.5862e-01, 4.7966e-01, 1.6712e-01, - 2.8354e-01, 8.5715e-01, 8.2491e-01, 9.3895e-01, - 6.2207e-01, 7.4772e-01, 6.8222e-01, 7.8302e-01, - 3.6293e-01, 9.3747e-01, 4.3535e-01, 3.2649e-01, - 1.2576e-01, 6.7395e-01, 3.8755e-01, 7.3431e-03, - 5.8672e-01, 1.7514e-01, 6.2576e-02, 4.2600e-01, - 8.4353e-01, 4.4830e-01, 4.2652e-01, 3.4003e-01, - 5.6530e-01, 5.0825e-01, 6.3458e-01, 6.3771e-01, - 5.3740e-01, 6.0430e-01, 5.2080e-01, 8.9504e-01, - 8.5600e-01, 9.5176e-01, 1.4018e-02, 8.8048e-01, - 8.5488e-01, 3.8407e-01, 7.3008e-01, 8.9626e-01, - 9.8292e-01, 3.1930e-01, 2.7730e-01, 6.6716e-02, - 1.1928e-01, 3.4153e-01, 4.5995e-02, 6.9950e-01, - 6.8140e-01, 7.1533e-01, 7.6891e-01, 1.3632e-01, - 8.5117e-01, 1.3578e-01, 2.1992e-01, 5.9821e-01, - 6.4608e-01, 4.2725e-01, 2.6707e-01, 5.4533e-01, - 5.1069e-01, 4.1512e-01, 7.8236e-01, 6.3547e-01, - 3.3141e-01, 8.3363e-01, 3.3275e-01, 9.7171e-01, - 3.5198e-01, 3.2371e-01, 5.9398e-01, 4.4829e-01, - 1.6692e-01, 1.2956e-01, 4.9761e-01, 1.4321e-01, - 5.6927e-01, 3.3084e-01, 8.5313e-01, 1.4241e-02, - 1.6810e-01, 6.1849e-01, 5.3128e-01, 3.1383e-01, - 8.0268e-01, 8.3374e-01, 5.7558e-01, 9.4233e-01, - 8.0228e-02, 3.9858e-01, 2.0918e-01, 9.6143e-01, - 1.9960e-01, 3.6105e-01, 7.1222e-01, 2.0743e-01, - 8.8267e-01, 8.0173e-01, 7.1956e-01, 7.6413e-01, - 1.1618e-01, 5.6266e-01, 7.1777e-01, 2.1787e-01, - 3.1249e-01, 1.1366e-01, 7.2133e-01, 5.0897e-01, - 7.7747e-01, 5.1124e-01, 2.3964e-02, 1.7601e-01, - 5.2421e-01, 4.7343e-01, 4.1193e-01, 5.0398e-02, - 9.4604e-01, 4.4435e-01, 8.4770e-01, 8.0073e-01, - 8.2843e-01, 8.4026e-01, 9.5200e-03, 5.5657e-01, - 1.1322e-01, 9.5564e-01, 2.7161e-01, 4.7624e-01, - 5.9371e-01, 5.1445e-01, 9.1385e-01, 7.6815e-01, - 7.7340e-01, 2.4805e-01, 3.4765e-01, 6.3520e-01, - 4.1396e-01, 6.5529e-01, 6.4127e-01, 9.8354e-01, - 9.7802e-01, 9.5844e-02, 2.3629e-01, 2.5244e-02, - 5.8430e-01, 7.2587e-01, 3.9399e-01, 6.5657e-01, - 2.2602e-01, 1.5947e-01, 1.2436e-01, 2.3184e-02, - 4.0334e-01, 3.3395e-01, 7.9580e-01, 9.0331e-01, - 1.4314e-01, 3.6350e-01, 9.8742e-01, 3.9153e-01, - 5.7572e-01, 8.9712e-01, 6.8108e-02, 3.4880e-01, - 6.9105e-02, 9.5150e-01, 1.5018e-01, 7.4890e-01, - 9.4472e-01, 1.9367e-02, 3.8256e-02, 7.8180e-02, - 3.9791e-01, 6.8360e-01, 2.8489e-01, 6.6300e-01, - 8.7842e-02, 9.8383e-01, 9.0831e-01, 9.7155e-02, - 1.2693e-01, 8.7860e-01, 2.6256e-02, 5.6248e-01, - 6.7185e-02, 8.0618e-01, 5.5146e-01, 2.3212e-01, - 6.5006e-01, 5.1569e-01, 7.5629e-01, 5.0736e-01, - 8.6644e-01, 3.2805e-01, 2.3392e-01, 6.9532e-01, - 6.4069e-02, 8.0766e-01, 4.8519e-02, 9.4545e-01, - 9.2322e-01, 9.4059e-01, 8.8406e-01, 1.4705e-01, - 2.0696e-01, 6.4209e-01, 1.7211e-01, 7.7366e-01, - 8.8312e-01, 4.1724e-01, 5.9560e-01, 1.0047e-02, - 8.0776e-01, 8.5045e-01, 2.7704e-02, 2.9548e-01, - 8.7383e-01, 1.2672e-01, 6.2008e-01, 7.8048e-01, - 
5.4722e-01, 3.7268e-02, 7.6316e-01, 5.4004e-01, - 8.2708e-01, 7.7993e-01, 5.1032e-01, 5.7951e-01, - 6.7007e-01, 6.9343e-01, 9.1925e-01, 4.3266e-01, - 8.2137e-01, 9.5167e-01, 1.3972e-01, 5.4549e-01, - 9.6649e-01, 4.2568e-01, 3.9725e-01, 3.5461e-01, - 7.7180e-01, 7.2418e-01, 7.8794e-01, 8.7493e-01, - 6.5834e-01, 9.4314e-01, 7.9979e-01, 6.5380e-01, - 3.7019e-01, 8.1362e-01, 6.5310e-01, 5.5138e-01, - 4.2526e-02, 9.5295e-01, 6.3106e-01, 6.6372e-01, - 6.5914e-01, 2.1948e-01, 6.8732e-01, 6.8666e-01, - 2.3578e-01, 6.7994e-01, 9.0510e-01, 8.3924e-01, - 4.9513e-01, 2.9955e-01, 1.8215e-02, 9.1885e-01, - 1.7408e-01, 1.8051e-01, 2.2848e-01, 6.6964e-02, - 3.9377e-01, 4.2686e-02, 1.3510e-01, 5.1147e-01, - 1.7646e-01, 6.7097e-01, 1.5327e-01, 8.9006e-01, - 2.5804e-01, 6.4589e-01, 4.3026e-01, 8.9192e-01, - 5.0256e-01, 5.1282e-01, 4.1308e-01, 2.4295e-01, - 3.1734e-02, 3.6877e-02, 2.2299e-02, 2.9965e-01, - 2.9078e-01, 3.8183e-01, 5.2222e-01, 5.7421e-01, - 1.1717e-01, 6.3181e-01, 5.8114e-01, 7.2116e-01, - 4.4703e-01, 4.2801e-01, 9.0478e-01, 3.9503e-01, - 1.8289e-01, 9.8264e-01, 7.7652e-01, 8.1865e-01, - 4.5727e-01, 3.9744e-01, 7.5562e-01, 1.2155e-01, - 9.7382e-02, 2.0197e-01, 4.8545e-01, 9.4925e-01, - 2.6545e-01, 3.5580e-01, 3.0269e-01, 1.1651e-02, - 4.1913e-01, 8.4954e-01, 1.1003e-01, 7.3324e-01, - 3.0650e-02, 8.6838e-01, 4.9323e-01, 8.1522e-01, - 3.4304e-01, 3.0905e-01, 3.9779e-01, 5.7834e-01, - 5.1821e-01, 5.0984e-01, 7.4684e-01, 5.0277e-01, - 5.7314e-01, 2.2013e-02, 1.2640e-01, 1.2089e-01, - 9.0576e-01, 5.1526e-01, 8.2341e-01, 1.4643e-02, - 9.2647e-01, 1.3228e-01, 6.2683e-01, 7.8975e-01, - 4.4056e-01, 5.0657e-01, 4.9976e-01, 2.1161e-01, - 5.5244e-01, 3.4723e-01, 2.1648e-01, 5.6745e-01, - 2.5556e-01, 8.8321e-03, 1.0900e-02, 2.1680e-01, - 8.8063e-01, 9.1328e-01, 3.2536e-02, 4.9694e-01, - 2.7345e-01, 7.4327e-01, 7.2809e-01, 6.6353e-01, - 5.3136e-01, 7.5753e-01, 4.8998e-01, 7.1511e-01, - 7.0253e-01, 6.4707e-01, 3.7836e-02, 7.7134e-01, - 1.5308e-02, 2.6444e-01, 4.6545e-01, 1.3966e-01, - 9.6812e-01, 3.7718e-01, 9.6591e-01, 4.2873e-01, - 7.8220e-01, 3.8017e-01, 3.5602e-01, 4.0939e-01, - 7.6208e-01, 1.8022e-01, 5.5876e-02, 6.7166e-01, - 3.8980e-01, 7.0437e-01, 9.9344e-01, 5.9567e-01, - 6.1296e-01, 8.9698e-01, 9.1062e-01, 9.2650e-01, - 9.1190e-01, 8.9253e-01, 4.9180e-01, 7.4096e-01, - 2.4088e-01, 1.0479e-01, 7.7261e-01, 4.3386e-01, - 3.9329e-01, 5.4304e-01, 3.6663e-01, 8.5654e-02, - 9.5144e-01, 1.5547e-01, 7.8376e-01, 1.1481e-01, - 5.2874e-01, 4.6678e-02, 1.6680e-01, 9.6803e-01, - 8.3213e-01, 5.9813e-01, 7.0737e-01, 8.3350e-01, - 3.7380e-01, 4.4355e-01, 8.7051e-01, 5.7225e-01, - 4.9317e-01, 3.6354e-01, 9.2835e-01, 5.9679e-01, - 1.3281e-01, 2.1930e-01, 7.9322e-01, 3.0523e-01, - 5.8165e-01, 7.5390e-01, 1.2114e-01, 5.3411e-02, - 4.1585e-01, 9.6187e-01, 4.2424e-01, 1.3533e-01, - 3.7712e-01, 1.8261e-01, 6.8088e-01, 1.8639e-01, - 5.1967e-01, 8.0666e-01, 5.0981e-01, 9.7923e-02, - 1.7373e-02, 5.5994e-01, 3.3362e-01, 2.4293e-01, - 1.6934e-01, 2.2530e-01, 9.7352e-01, 4.9510e-01, - 2.1393e-01, 5.8022e-01, 3.8276e-02, 8.7272e-02, - 6.8543e-01, 3.2756e-02, 5.6718e-01, 3.9117e-01, - 9.7834e-01, 6.5395e-01, 8.5529e-01, 4.8972e-01, - 1.9190e-01, 1.9049e-01, 4.8019e-01, 7.2147e-01, - 1.2100e-01, 3.2180e-01, 5.1040e-03, 7.9651e-01, - 2.7309e-01, 6.6645e-01, 2.9896e-01, 5.1748e-01, - 6.1241e-01, 5.6827e-01, 5.9954e-01, 6.7815e-01, - 6.2197e-01, 1.1702e-01, 4.1684e-01, 7.0624e-01, - 7.6888e-01, 6.1216e-01, 1.1353e-01, 9.6720e-01, - 1.2108e-01, 5.9995e-01, 1.8773e-01, 2.3973e-01, - 5.9648e-01, 1.6246e-01, 6.1599e-02, 3.9480e-01, - 
5.0531e-01, 9.9736e-01, 2.5833e-01, 8.2165e-01, - 5.5800e-01, 6.4798e-01, 9.0038e-01, 9.9833e-01, - 9.2312e-01, 2.6677e-01, 6.5352e-01, 4.6482e-01, - 1.2201e-01, 6.7988e-01, 2.7359e-01, 5.8169e-01, - 8.4593e-01, 2.9999e-01, 1.1410e-01, 3.6589e-01, - 6.5950e-01, 8.4661e-01, 3.7005e-01, 6.3599e-01, - 8.8603e-01, 4.0464e-01, 1.8286e-01, 5.2775e-02, - 8.4989e-01, 3.3061e-01, 8.2236e-01, 4.3585e-01, - 7.4979e-01, 8.1839e-01, 2.9331e-01, 7.1902e-02, - 8.5449e-01, 2.4751e-01, 7.1396e-02, 1.2001e-01, - 9.4854e-01, 6.6576e-01, 9.7865e-01, 1.0903e-01, - 5.1127e-01, 1.3729e-01, 4.3680e-01, 9.3247e-01, - 5.1767e-01, 3.0724e-02, 8.0497e-02, 6.4769e-02, - 7.6391e-01, 2.0889e-02, 4.1965e-01, 9.0319e-01, - 4.3557e-01, 9.5238e-01, 4.0303e-02, 3.6623e-01, - 3.2743e-01, 1.2845e-01, 6.4784e-01, 8.7231e-01, - 6.8798e-01, 3.1354e-01, 5.3644e-01, 9.6932e-01, - 1.7454e-01, 2.1021e-01, 1.4568e-01, 8.3907e-01, - 6.2260e-01, 9.3888e-01, 3.9588e-01, 8.7395e-01, - 5.9658e-01, 9.8182e-02, 5.6640e-01, 6.3829e-02, - 6.9286e-01, 7.5561e-01, 1.5461e-01, 3.7725e-02, - 1.9831e-01, 2.6020e-01, 8.3708e-01, 1.0480e-01, - 9.0558e-01, 6.1646e-01, 7.0305e-01, 1.9392e-01, - 4.0235e-01, 2.5149e-02, 9.1578e-02, 5.7882e-01, - 2.5579e-01, 4.0794e-02, 7.2276e-01, 1.6891e-01, - 2.5039e-01, 9.9730e-01, 7.8170e-01, 6.1608e-01, - 7.7103e-01, 2.5630e-01, 1.4927e-01, 2.7998e-01, - 3.0817e-01, 7.3242e-02, 1.2753e-01, 4.1496e-01, - 5.8219e-01, 5.8755e-01, 9.8146e-01, 6.2375e-01, - 8.5660e-01, 2.1402e-01, 5.1545e-01, 1.2679e-01, - 3.2732e-01, 1.7006e-01, 4.3828e-01, 1.6170e-01, - 1.0399e-01, 5.3933e-01, 7.8541e-01, 1.1931e-01, - 1.5504e-01, 7.8948e-01, 1.5878e-01, 7.4734e-01, - 2.3345e-01, 2.3097e-01, 1.4671e-01, 3.7307e-01, - 8.0431e-01, 4.5794e-01, 9.2994e-01, 8.9926e-01, - 8.4547e-01, 5.0121e-02, 7.2131e-01, 6.2424e-01, - 6.3904e-01, 4.3546e-01, 8.0833e-01, 4.9642e-01, - 5.7037e-01, 3.9698e-01, 4.3301e-02, 4.0795e-01, - 6.9428e-01, 9.9629e-01, 9.0683e-01, 5.4597e-01, - 5.7624e-01, 9.9789e-01, 2.4283e-01, 5.9102e-01, - 3.8101e-01, 7.2091e-03, 6.9826e-01, 6.1377e-01, - 6.0709e-01, 4.9390e-01, 6.1114e-01, 7.2574e-01, - 8.2221e-01, 5.4550e-01, 3.0011e-01, 4.4517e-01, - 4.5758e-02, 4.6015e-01, 3.4893e-02, 2.0905e-01, - 7.1053e-01, 8.6429e-01, 1.4344e-01, 5.8718e-01, - 1.3326e-01, 6.4655e-01, 1.7744e-01, 7.3568e-01, - 4.4107e-01, 3.5360e-01, 7.8895e-01, 1.3426e-02, - 2.5589e-01, 8.9759e-01, 1.8598e-01, 1.1031e-01, - 6.4082e-02, 1.4471e-01, 6.4713e-01, 5.3510e-01, - 3.5916e-01, 2.3596e-01, 3.7914e-01, 4.5283e-01, - 4.0277e-01, 8.3793e-01, 3.6913e-01, 8.0491e-01, - 8.0560e-01, 8.3787e-01, 3.2988e-01, 2.7062e-01, - 5.2644e-01, 8.0191e-01, 3.0417e-01, 8.0992e-01, - 3.7054e-01, 6.5171e-01, 8.9851e-01, 7.9938e-01, - 7.9593e-01, 4.1200e-01, 7.2346e-01, 8.8886e-01, - 7.7392e-01, 3.0164e-01, 4.8903e-01, 7.0959e-01, - 2.3055e-04, 5.5567e-01, 3.2476e-02, 4.6940e-01, - 9.5992e-01, 3.3002e-01, 1.9968e-01, 3.6916e-01, - 9.3384e-01, 4.4547e-01, 8.4545e-01, 7.0652e-01, - 2.5484e-01, 6.6093e-01, 1.0142e-01, 4.0036e-01, - 8.8027e-02, 7.0693e-01, 6.1292e-01, 3.5279e-02, - 1.6464e-01, 7.4166e-01, 7.4512e-01, 3.1187e-01, - 9.3827e-01, 8.1291e-01, 6.8774e-01, 1.6535e-01, - 8.2353e-01, 4.4462e-01, 6.2390e-01, 2.5665e-01, - 2.4823e-01, 6.5084e-01, 2.8892e-01, 8.3557e-01, - 2.6616e-01, 8.8256e-01, 3.9677e-01, 7.0336e-01, - 8.3626e-01, 8.4230e-01, 1.2696e-01, 5.4260e-01, - 4.5014e-01, 1.2684e-01, 4.1101e-01, 4.8496e-01, - 7.6136e-01, 5.3988e-01, 4.7573e-01, 1.6785e-01, - 8.8662e-01, 6.3235e-01, 2.9589e-01, 2.8836e-01, - 9.5162e-01, 5.4258e-01, 2.7719e-01, 7.2819e-01, - 
4.5646e-02, 7.4687e-01, 6.9970e-01, 6.6663e-01, - 4.4353e-01, 9.2703e-01, 1.7143e-01, 9.9752e-01, - 1.6120e-01, 9.1760e-02, 7.4539e-02, 6.2124e-01, - 3.9172e-01, 8.8093e-01, 8.8278e-01, 9.1996e-01, - 9.9713e-01, 4.7916e-01, 8.6919e-01, 9.8249e-01, - 8.6668e-01, 3.9161e-01, 6.4609e-01, 5.0516e-01, - 6.3812e-02, 4.1956e-01, 4.4304e-01, 5.3391e-01, - 9.4156e-01, 4.0458e-01, 2.9747e-01, 1.0297e-01, - 2.1881e-01, 5.0346e-01, 5.5620e-01, 1.2242e-01, - 9.4646e-01, 4.2415e-01, 1.2648e-01, 8.3879e-01, - 2.1895e-01, 7.2594e-01, 9.5334e-01, 4.1030e-01, - 6.5805e-01, 6.1371e-01, 6.3829e-01, 6.4855e-01, - 1.9689e-01, 7.6433e-01, 1.4868e-01, 1.2936e-01, - 1.2645e-01, 8.9930e-01, 1.6824e-01, 9.4670e-01, - 4.8028e-01, 6.9663e-01, 4.2572e-01, 8.7528e-01, - 8.1359e-02, 9.6745e-01, 2.7830e-01, 7.5427e-01, - 5.4086e-02, 9.3150e-01, 6.8940e-01, 8.9446e-01, - 4.2269e-01, 1.5823e-02, 6.6296e-01, 8.5098e-01, - 8.8125e-01, 8.8539e-01, 6.4149e-01, 9.0541e-01, - 8.6570e-01, 1.9544e-01, 4.7643e-01, 8.5814e-01]), + col_indices=tensor([3973, 7951, 6448, 7869, 8084, 9579, 2166, 6078, 8362, + 8338, 8347, 5231, 6954, 9251, 1588, 5032, 2102, 2793, + 9690, 6831, 9069, 2808, 4275, 4074, 907, 3545, 5544, + 6941, 2356, 797, 2644, 6095, 4109, 3561, 6200, 28, + 557, 6355, 1990, 951, 69, 8267, 3139, 215, 6612, + 2860, 9213, 3348, 7098, 6592, 1146, 7228, 789, 9196, + 4382, 7744, 7817, 1180, 1510, 4317, 7077, 4265, 6219, + 9856, 311, 1497, 5748, 2535, 7861, 2853, 1662, 4174, + 4694, 7392, 5450, 3394, 4805, 2432, 1322, 8861, 6678, + 3023, 1316, 4128, 2030, 3793, 8525, 8443, 1161, 991, + 1447, 2471, 6828, 2582, 6332, 4483, 41, 4006, 219, + 5990, 1636, 3986, 5354, 8312, 8664, 9463, 4528, 141, + 3941, 4470, 6778, 5188, 9246, 7613, 8447, 2428, 1539, + 9970, 4662, 9881, 2741, 7672, 7933, 80, 6971, 8473, + 4272, 6382, 3599, 7720, 123, 2250, 1458, 8861, 4899, + 9782, 2839, 4678, 9108, 4080, 1338, 5595, 1158, 3764, + 4696, 3832, 8401, 5959, 1183, 231, 7893, 8235, 8841, + 2906, 1812, 2502, 6071, 3913, 3016, 804, 4126, 2806, + 4482, 5376, 1238, 5593, 4938, 3143, 1445, 6781, 2527, + 6429, 9354, 624, 1378, 7349, 9901, 6199, 3158, 4135, + 5456, 7226, 9816, 792, 5368, 6449, 9103, 9270, 6930, + 493, 654, 3407, 9216, 5688, 1755, 6223, 5657, 2654, + 9831, 3125, 361, 7126, 2218, 2602, 5098, 4761, 1205, + 8621, 115, 9888, 3031, 2585, 352, 4367, 811, 5475, + 2429, 7689, 3224, 5662, 6948, 6079, 5550, 9463, 1122, + 5894, 8681, 6785, 9288, 7944, 7865, 6760, 9011, 5553, + 4825, 6415, 2355, 898, 3403, 9170, 561, 9126, 4234, + 4025, 5940, 1906, 5324, 7757, 4713, 9775, 1276, 3050, + 1845, 2151, 1470, 5517, 6107, 2126, 169, 7316, 5147, + 7143, 5798, 7485, 2100, 1175, 3639, 2292, 7216, 5739, + 4604, 5676, 7900, 1743, 5543, 496, 5438, 3033, 6300, + 9625, 7884, 868, 6764, 7790, 4603, 4275, 2542, 7679, + 2517, 753, 1188, 9, 9628, 825, 1504, 4738, 4503, + 3667, 2724, 2437, 9981, 5492, 4742, 4297, 8972, 2110, + 5788, 2844, 1894, 1956, 849, 4784, 2604, 1766, 9559, + 8830, 513, 8376, 2625, 9089, 5995, 224, 4591, 8437, + 3546, 8082, 5587, 7269, 7261, 1050, 8775, 8368, 9710, + 3130, 7138, 1877, 1810, 714, 7162, 6619, 5709, 5555, + 9176, 4508, 3812, 9450, 8136, 3068, 6301, 8245, 2547, + 3686, 2549, 4881, 760, 7053, 4068, 2932, 8510, 5794, + 3610, 5663, 6536, 6323, 7083, 9722, 3009, 5541, 1592, + 8519, 4330, 9599, 8483, 5422, 8749, 7140, 7425, 3258, + 4071, 3508, 440, 3491, 8958, 227, 1822, 1899, 4417, + 4130, 5376, 7607, 2711, 5070, 8453, 5974, 5725, 3102, + 4319, 1136, 5353, 2913, 2100, 2380, 7803, 8429, 7033, + 9865, 4179, 2758, 7091, 9712, 3940, 1660, 2738, 1161, + 
1273, 1987, 8698, 9908, 1522, 5855, 6557, 5362, 6199, + 5495, 3618, 2810, 4992, 2049, 5628, 2971, 7729, 8844, + 8814, 237, 6048, 2296, 9533, 7612, 7926, 9780, 925, + 8697, 4822, 2510, 5570, 1858, 3634, 978, 9466, 3386, + 5188, 7331, 2020, 4713, 1236, 507, 3473, 8015, 4093, + 2302, 6423, 8539, 9221, 3, 6883, 8377, 9158, 2785, + 7310, 6982, 1898, 4277, 8390, 9571, 9839, 1183, 4005, + 7487, 5970, 6616, 6116, 6012, 1446, 5537, 5387, 1151, + 2755, 3911, 5651, 5271, 3235, 386, 4450, 6442, 6100, + 3848, 6984, 2294, 4300, 3422, 41, 1221, 8412, 5345, + 4518, 2990, 4466, 2619, 6417, 7807, 6949, 2231, 5092, + 8664, 1107, 988, 4816, 494, 6420, 3491, 453, 3600, + 7054, 2027, 3564, 1710, 9554, 5472, 8900, 9386, 9031, + 1816, 8149, 7567, 8266, 3663, 9033, 5921, 364, 8749, + 6698, 9060, 756, 9355, 9834, 1694, 9007, 8663, 6164, + 4533, 453, 3525, 2999, 1620, 8551, 5584, 7302, 3431, + 3511, 1151, 2693, 6701, 2811, 1846, 3914, 8113, 4732, + 1710, 1768, 4864, 3721, 6244, 7942, 2108, 6527, 3669, + 9755, 6535, 6335, 1200, 5012, 8614, 6815, 9348, 9957, + 5437, 7361, 3722, 976, 9311, 7143, 6455, 9740, 8884, + 3173, 3993, 7603, 353, 4034, 6110, 6153, 8098, 8815, + 2092, 5561, 6424, 9337, 488, 1819, 5055, 9226, 3017, + 4576, 898, 1858, 9119, 597, 6301, 2611, 6385, 9665, + 5459, 358, 3146, 9108, 9707, 1541, 9224, 4402, 9579, + 3681, 9452, 1698, 7040, 3635, 9172, 7054, 3774, 3656, + 6702, 2178, 7785, 5385, 919, 2093, 7462, 2285, 9457, + 7427, 4598, 3631, 4308, 7505, 7302, 2092, 7835, 4224, + 3882, 7590, 9918, 7814, 756, 9050, 2847, 7772, 7696, + 3977, 171, 1283, 9421, 5242, 3078, 4953, 7874, 4620, + 4267, 7589, 331, 7280, 5130, 4485, 3121, 6860, 8138, + 9853, 8960, 2877, 9855, 2762, 9180, 8810, 4152, 3051, + 3629, 8822, 2644, 3273, 9726, 2350, 9066, 6303, 8018, + 5285, 1079, 2137, 4590, 4781, 8984, 5550, 1556, 9883, + 9833, 9681, 4081, 9689, 673, 4409, 5987, 6177, 8822, + 6139, 8036, 5927, 4714, 6545, 838, 9662, 2644, 5070, + 3487, 3165, 5195, 3829, 6379, 2232, 5754, 5129, 5848, + 3894, 8283, 5980, 9902, 3662, 8890, 8853, 7110, 9795, + 7642, 7103, 469, 4810, 6688, 2429, 4076, 5306, 9979, + 1005, 5135, 7240, 217, 8643, 9053, 6814, 9298, 2894, + 2956, 6801, 5613, 8067, 4531, 7499, 8653, 2058, 7557, + 4361, 1877, 3469, 8406, 121, 4566, 7838, 1546, 7137, + 4932, 2801, 8544, 3003, 8745, 6652, 473, 4604, 8791, + 639, 3375, 5223, 2802, 4534, 7871, 7405, 7614, 7083, + 843, 5874, 5304, 4212, 528, 4583, 223, 4030, 2578, + 141, 353, 5303, 9898, 6794, 7807, 3099, 9090, 5793, + 758, 3061, 5500, 4165, 6157, 5208, 7754, 1844, 5707, + 3293, 5083, 2302, 690, 1183, 1997, 5490, 4852, 2205, + 7232, 7597, 6171, 1383, 4301, 5376, 664, 168, 2009, + 2094, 399, 3659, 3084, 5546, 7468, 2182, 3513, 2614, + 8010, 278, 6465, 4957, 8539, 7108, 9242, 2256, 2890, + 8404, 3586, 6800, 8966, 5769, 8430, 6846, 6811, 7644, + 7111, 317, 8434, 8264, 6583, 9847, 3990, 5681, 2242, + 1436, 1513, 9864, 3452, 6689, 6046, 7561, 4932, 3944, + 3218, 1928, 7678, 8172, 4776, 5768, 1423, 8354, 4677, + 3404, 6672, 7842, 4930, 9021, 1299, 7118, 1359, 3096, + 9765, 756, 9451, 1433, 7324, 129, 7736, 2685, 402, + 4229, 7359, 2482, 8948, 9187, 8125, 7326, 2103, 1079, + 9957, 336, 3873, 8288, 3188, 9701, 7293, 9132, 9723, + 9022, 7867, 4259, 3133, 9090, 6528, 9027, 8659, 1507, + 7617, 4841, 45, 3093, 3583, 4343, 5982, 8161, 6155, + 7739, 6985, 8567, 9853, 723, 6930, 7751, 6992, 7484, + 1228, 8212, 8093, 5964, 2926, 1855, 8661, 8972, 1067, + 8476, 2481, 2860, 3244, 2959, 6123, 2484, 6976, 5707, + 149, 8194, 1354, 3355, 7336, 2434, 3067, 2904, 7963, + 9385, 9454, 6997, 2534, 
2719, 9409, 566, 9588, 6159, + 6836]), + values=tensor([0.1595, 0.3574, 0.1569, 0.1927, 0.3455, 0.5475, 0.2360, + 0.1267, 0.8383, 0.2014, 0.9754, 0.6677, 0.1560, 0.5822, + 0.1680, 0.8880, 0.2016, 0.4887, 0.3651, 0.0525, 0.8366, + 0.9019, 0.0584, 0.5934, 0.4533, 0.4719, 0.3162, 0.0856, + 0.7503, 0.4555, 0.9429, 0.4009, 0.2620, 0.7629, 0.8959, + 0.8100, 0.4815, 0.9912, 0.3905, 0.7870, 0.9051, 0.8110, + 0.3963, 0.7546, 0.5404, 0.9403, 0.6281, 0.8601, 0.2128, + 0.9335, 0.8597, 0.6945, 0.6850, 0.0122, 0.8533, 0.8021, + 0.9410, 0.6950, 0.0132, 0.6242, 0.6252, 0.4892, 0.9799, + 0.2600, 0.5261, 0.3792, 0.5610, 0.3160, 0.6218, 0.2206, + 0.4237, 0.9982, 0.6522, 0.9333, 0.4171, 0.0460, 0.8531, + 0.2544, 0.1701, 0.8725, 0.4151, 0.7436, 0.8380, 0.0031, + 0.3674, 0.8777, 0.9995, 0.2664, 0.8279, 0.9505, 0.7657, + 0.0903, 0.6663, 0.4719, 0.7276, 0.6169, 0.8148, 0.4273, + 0.3201, 0.1031, 0.2252, 0.8940, 0.3381, 0.8762, 0.7333, + 0.0849, 0.8563, 0.6688, 0.1880, 0.9474, 0.8424, 0.7469, + 0.9112, 0.2026, 0.6237, 0.9001, 0.2204, 0.9422, 0.4954, + 0.2980, 0.9255, 0.7271, 0.1643, 0.0068, 0.3178, 0.5072, + 0.3845, 0.1691, 0.3369, 0.7974, 0.8022, 0.9517, 0.0776, + 0.3732, 0.7589, 0.9862, 0.5735, 0.0931, 0.9904, 0.2630, + 0.6907, 0.2327, 0.9485, 0.3933, 0.0070, 0.5468, 0.1908, + 0.4896, 0.4702, 0.2500, 0.3063, 0.1212, 0.3153, 0.1618, + 0.7622, 0.8638, 0.1344, 0.6568, 0.5980, 0.9557, 0.8515, + 0.2277, 0.3628, 0.2727, 0.0373, 0.9037, 0.4799, 0.3062, + 0.6071, 0.8382, 0.4268, 0.8911, 0.8229, 0.2938, 0.0976, + 0.8421, 0.7644, 0.9978, 0.3199, 0.6222, 0.4954, 0.9778, + 0.0275, 0.7170, 0.6011, 0.1531, 0.5312, 0.9783, 0.8205, + 0.8456, 0.4085, 0.4253, 0.7621, 0.1505, 0.1381, 0.3816, + 0.5020, 0.0751, 0.7124, 0.3677, 0.0986, 0.0496, 0.6638, + 0.3861, 0.0351, 0.3193, 0.4186, 0.4530, 0.9467, 0.4987, + 0.3540, 0.1193, 0.8914, 0.4872, 0.4479, 0.6686, 0.2751, + 0.6904, 0.3039, 0.5511, 0.0068, 0.1531, 0.4781, 0.0240, + 0.0954, 0.4173, 0.8675, 0.3561, 0.5523, 0.8362, 0.8739, + 0.2651, 0.1857, 0.6250, 0.3480, 0.8560, 0.1622, 0.0542, + 0.0157, 0.5141, 0.3731, 0.1157, 0.9155, 0.3058, 0.7435, + 0.7859, 0.4688, 0.0056, 0.1613, 0.7726, 0.0041, 0.3619, + 0.2268, 0.9956, 0.9510, 0.3827, 0.0067, 0.1149, 0.7615, + 0.9664, 0.8445, 0.0184, 0.4913, 0.9609, 0.7391, 0.2482, + 0.6660, 0.0209, 0.8868, 0.7347, 0.8362, 0.2449, 0.8127, + 0.7980, 0.3277, 0.9091, 0.5022, 0.3583, 0.2423, 0.2692, + 0.1713, 0.8883, 0.9609, 0.9490, 0.5907, 0.4577, 0.2866, + 0.7624, 0.0106, 0.3356, 0.3755, 0.3176, 0.2329, 0.6294, + 0.2743, 0.3101, 0.4588, 0.4036, 0.2885, 0.0873, 0.1858, + 0.6210, 0.6215, 0.7748, 0.7653, 0.5778, 0.6956, 0.8102, + 0.5336, 0.2692, 0.9831, 0.1045, 0.0380, 0.6104, 0.9903, + 0.9600, 0.4662, 0.6465, 0.0804, 0.3548, 0.4656, 0.2254, + 0.5211, 0.0469, 0.0289, 0.7491, 0.2704, 0.0359, 0.3814, + 0.7932, 0.0705, 0.6724, 0.6843, 0.5705, 0.8756, 0.5358, + 0.2306, 0.2562, 0.1388, 0.0248, 0.6323, 0.9284, 0.8278, + 0.7314, 0.2155, 0.2294, 0.0161, 0.6530, 0.5020, 0.7866, + 0.8715, 0.3957, 0.6929, 0.8887, 0.2496, 0.0985, 0.9688, + 0.1963, 0.1758, 0.9882, 0.3141, 0.5378, 0.2842, 0.7512, + 0.7785, 0.2724, 0.7845, 0.7457, 0.7853, 0.4491, 0.0892, + 0.6755, 0.0822, 0.3497, 0.2198, 0.2096, 0.7362, 0.7000, + 0.7558, 0.6886, 0.3624, 0.4586, 0.4677, 0.6188, 0.3817, + 0.9762, 0.6388, 0.3421, 0.3758, 0.4427, 0.6492, 0.6447, + 0.3194, 0.7097, 0.4354, 0.7354, 0.5318, 0.8658, 0.7094, + 0.6004, 0.9097, 0.9603, 0.8838, 0.0910, 0.6694, 0.1681, + 0.2590, 0.0355, 0.8694, 0.7102, 0.2759, 0.5787, 0.4767, + 0.3597, 0.7267, 0.6895, 0.4960, 0.8730, 0.2749, 0.1586, + 0.6161, 0.7990, 
0.8168, 0.0944, 0.7334, 0.9960, 0.2785, + 0.0470, 0.3508, 0.4295, 0.0284, 0.0051, 0.3737, 0.5871, + 0.4284, 0.8218, 0.9847, 0.3679, 0.1207, 0.2346, 0.4243, + 0.9559, 0.2612, 0.4282, 0.3933, 0.2705, 0.3399, 0.6871, + 0.4253, 0.3048, 0.5530, 0.8665, 0.9604, 0.7933, 0.5495, + 0.3547, 0.6941, 0.3034, 0.1736, 0.2158, 0.5342, 0.2303, + 0.3372, 0.4158, 0.6518, 0.9761, 0.7109, 0.3505, 0.9214, + 0.1522, 0.0453, 0.0191, 0.9590, 0.8644, 0.7588, 0.8704, + 0.1540, 0.2129, 0.5901, 0.1942, 0.5206, 0.2060, 0.2277, + 0.7450, 0.0777, 0.9499, 0.5353, 0.5186, 0.1737, 0.7503, + 0.8631, 0.7618, 0.5550, 0.2174, 0.3974, 0.1521, 0.6051, + 0.4034, 0.4703, 0.9003, 0.2967, 0.9908, 0.5484, 0.1785, + 0.4706, 0.1583, 0.1875, 0.9349, 0.1669, 0.1316, 0.9691, + 0.6559, 0.8500, 0.6218, 0.3967, 0.4572, 0.9139, 0.6810, + 0.1586, 0.1486, 0.8315, 0.8096, 0.9857, 0.7295, 0.3313, + 0.2901, 0.7519, 0.2999, 0.4506, 0.2748, 0.0649, 0.1849, + 0.4934, 0.1966, 0.2320, 0.1827, 0.6308, 0.8147, 0.3176, + 0.1413, 0.7432, 0.0179, 0.7421, 0.3941, 0.5882, 0.7801, + 0.9334, 0.1856, 0.6599, 0.6152, 0.2121, 0.9447, 0.7494, + 0.0679, 0.1033, 0.5551, 0.1571, 0.0595, 0.8123, 0.0859, + 0.9549, 0.1265, 0.5115, 0.4947, 0.6271, 0.6976, 0.8284, + 0.6156, 0.3197, 0.4649, 0.6608, 0.0350, 0.6588, 0.4689, + 0.8863, 0.6367, 0.8396, 0.7628, 0.4943, 0.9468, 0.0315, + 0.9582, 0.2547, 0.1886, 0.6267, 0.7794, 0.4776, 0.4022, + 0.5361, 0.7915, 0.8303, 0.9516, 0.0065, 0.0486, 0.5721, + 0.6300, 0.7896, 0.2178, 0.6430, 0.6654, 0.7293, 0.9324, + 0.0904, 0.0029, 0.6017, 0.9638, 0.4045, 0.4907, 0.9349, + 0.3193, 0.8456, 0.7305, 0.1329, 0.8873, 0.6186, 0.7027, + 0.8375, 0.5304, 0.7685, 0.9669, 0.0769, 0.5929, 0.9438, + 0.8255, 0.1349, 0.9462, 0.6909, 0.5550, 0.8560, 0.1634, + 0.1474, 0.2667, 0.0145, 0.8628, 0.5634, 0.9169, 0.9663, + 0.2805, 0.7481, 0.5412, 0.6579, 0.1723, 0.4958, 0.1952, + 0.2705, 0.8297, 0.8781, 0.9300, 0.4932, 0.4241, 0.8045, + 0.7911, 0.2353, 0.7012, 0.0295, 0.4429, 0.5761, 0.6847, + 0.7203, 0.0226, 0.4063, 0.4252, 0.4095, 0.6422, 0.3906, + 0.9874, 0.3819, 0.4620, 0.5675, 0.5519, 0.8265, 0.6693, + 0.1202, 0.8118, 0.1855, 0.3789, 0.0306, 0.3898, 0.5980, + 0.1281, 0.5170, 0.3924, 0.7384, 0.2157, 0.0988, 0.9655, + 0.9173, 0.2438, 0.6109, 0.4547, 0.7814, 0.3018, 0.9140, + 0.4492, 0.0868, 0.3575, 0.8858, 0.7853, 0.5862, 0.9430, + 0.2082, 0.5310, 0.7981, 0.9057, 0.9136, 0.6813, 0.3059, + 0.8130, 0.5169, 0.0825, 0.2122, 0.0361, 0.0680, 0.7877, + 0.1443, 0.9315, 0.9537, 0.6573, 0.6673, 0.6096, 0.7720, + 0.4006, 0.2793, 0.9923, 0.9568, 0.7372, 0.4581, 0.2225, + 0.4795, 0.3467, 0.5454, 0.8269, 0.7995, 0.9928, 0.1039, + 0.1411, 0.1543, 0.1304, 0.0386, 0.3874, 0.3397, 0.3370, + 0.6393, 0.6802, 0.4644, 0.5235, 0.9807, 0.7889, 0.1021, + 0.9696, 0.0877, 0.1046, 0.8426, 0.2928, 0.7682, 0.8307, + 0.0498, 0.3274, 0.2247, 0.5502, 0.2341, 0.4460, 0.7793, + 0.0766, 0.4276, 0.5283, 0.7434, 0.7790, 0.6635, 0.9685, + 0.5303, 0.3645, 0.0898, 0.4529, 0.3974, 0.9204, 0.8693, + 0.1193, 0.3095, 0.1361, 0.1585, 0.0797, 0.0386, 0.4867, + 0.8062, 0.7372, 0.7472, 0.2565, 0.7787, 0.5623, 0.0895, + 0.4141, 0.0579, 0.2073, 0.2739, 0.2587, 0.0279, 0.8989, + 0.3379, 0.3508, 0.9852, 0.3682, 0.7416, 0.4370, 0.7560, + 0.6964, 0.0462, 0.8293, 0.5669, 0.8160, 0.4919, 0.0343, + 0.3307, 0.6785, 0.5043, 0.5529, 0.4214, 0.9726, 0.0919, + 0.4339, 0.1688, 0.9404, 0.3583, 0.5260, 0.7457, 0.6037, + 0.4126, 0.8261, 0.6361, 0.7849, 0.4720, 0.4658, 0.4472, + 0.6965, 0.6849, 0.1463, 0.7329, 0.7205, 0.6296, 0.3943, + 0.6225, 0.5935, 0.3962, 0.5131, 0.0094, 0.1853, 0.3726, + 0.2671, 0.8018, 0.8606, 0.1727, 
0.4492, 0.1565, 0.2321, + 0.5274, 0.3800, 0.7385, 0.1726, 0.8828, 0.5176, 0.5197, + 0.2711, 0.2519, 0.5041, 0.2737, 0.3773, 0.8524, 0.9013, + 0.3757, 0.2440, 0.0248, 0.3047, 0.1741, 0.1625, 0.4159, + 0.6619, 0.4663, 0.5995, 0.1867, 0.1602, 0.9925, 0.4213, + 0.1398, 0.2159, 0.0078, 0.9534, 0.2005, 0.1991, 0.9875, + 0.8102, 0.7752, 0.4701, 0.4231, 0.4248, 0.2862, 0.3896, + 0.6863, 0.8315, 0.2118, 0.5451, 0.2655, 0.0905, 0.5261, + 0.8693, 0.4019, 0.1497, 0.3541, 0.0860, 0.0962, 0.2879, + 0.5698, 0.1106, 0.6799, 0.8942, 0.7616, 0.6926, 0.1966, + 0.4551, 0.7681, 0.0079, 0.9274, 0.5069, 0.8968, 0.3655, + 0.2959, 0.0872, 0.0693, 0.7746, 0.5158, 0.7922, 0.0759, + 0.3888, 0.0874, 0.3282, 0.5900, 0.7945, 0.8309, 0.9513, + 0.3391, 0.1582, 0.6222, 0.7213, 0.2696, 0.2995, 0.0623, + 0.7078, 0.2954, 0.4316, 0.2744, 0.6635, 0.6872, 0.8983, + 0.7149, 0.3497, 0.3162, 0.0296, 0.5639, 0.7317, 0.1808, + 0.5588, 0.4835, 0.1889, 0.2880, 0.0596, 0.9660, 0.6453, + 0.2374, 0.0690, 0.2719, 0.9133, 0.2929, 0.5555, 0.7051, + 0.3500, 0.3031, 0.8234, 0.6216, 0.6849, 0.6063, 0.7426, + 0.6347, 0.2320, 0.0786, 0.4232, 0.7048, 0.8846, 0.7739, + 0.9266, 0.8791, 0.1752, 0.0562, 0.4849, 0.4175, 0.0203, + 0.7363, 0.1222, 0.4577, 0.5149, 0.7902, 0.1347]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3920, 0.2913, 0.8672, ..., 0.9245, 0.8812, 0.1957]) +tensor([0.8197, 0.8953, 0.9337, ..., 0.6014, 0.8565, 0.4467]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1133,375 +540,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.585279941558838 seconds +Time: 7.084842920303345 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '361507', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.549095392227173} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7943, 4488, 6154, 9391, 155, 1146, 4006, 8134, 8781, - 7101, 3276, 7191, 9320, 2859, 4578, 2750, 6596, 4201, - 2541, 6640, 9961, 1286, 5879, 9740, 3536, 2151, 9582, - 2021, 7827, 8693, 2313, 555, 3377, 7363, 334, 2888, - 9782, 3162, 5677, 4519, 3889, 4828, 247, 2616, 279, - 8565, 2538, 9525, 8485, 2616, 1166, 2089, 7055, 6468, - 9499, 1310, 5525, 2540, 8419, 935, 4661, 2785, 1947, - 1602, 2918, 4726, 3718, 3716, 5417, 2404, 2572, 1793, - 4269, 7015, 419, 4336, 5223, 1709, 8875, 645, 5198, - 3752, 5677, 5777, 9470, 6191, 7729, 3008, 6984, 7165, - 5063, 8482, 7789, 9298, 6624, 3445, 4654, 5489, 7051, - 2026, 5766, 3319, 8576, 4863, 735, 6400, 8243, 4596, - 9136, 5453, 8094, 6731, 4592, 6080, 2446, 2152, 9189, - 7168, 5575, 8736, 8708, 188, 2747, 5830, 9269, 8804, - 3159, 3201, 4041, 923, 2727, 2290, 84, 4623, 7603, - 3330, 9268, 1450, 9643, 7491, 1862, 6217, 7622, 6831, - 6030, 2019, 4765, 7685, 3438, 4110, 698, 7237, 1712, - 233, 3041, 9992, 7124, 3242, 6158, 3269, 3219, 7971, - 3346, 2455, 5227, 7583, 1444, 6727, 5034, 2207, 2810, - 6860, 3956, 406, 9630, 4569, 2333, 3303, 790, 9166, - 3001, 9674, 4130, 4005, 9208, 9872, 4115, 1140, 4295, - 1232, 2077, 9451, 8659, 2998, 5331, 5112, 5332, 473, - 2236, 9565, 7574, 3329, 2231, 9436, 1794, 1973, 926, - 9017, 624, 3226, 1901, 9136, 3272, 7061, 4208, 8821, - 7803, 6260, 839, 8051, 1195, 3556, 5978, 2134, 4703, - 5878, 6569, 631, 929, 936, 8619, 2484, 8239, 5928, - 7092, 2988, 7292, 9843, 5609, 7908, 9735, 1868, 8029, - 6880, 8241, 655, 7771, 7115, 4230, 2177, 5561, 519, - 9981, 4448, 1648, 5631, 1203, 3106, 9586, 8781, 334, - 6071, 676, 3, 2908, 206, 7480, 7816, 8182, 3140, - 5100, 6638, 2660, 7295, 3954, 4994, 5905, 5343, 875, - 1395, 1415, 8075, 1833, 8285, 3241, 5726, 82, 7838, - 5658, 9442, 7175, 6495, 1263, 5251, 545, 2259, 5163, - 5212, 9905, 1946, 7828, 3306, 3170, 1189, 5060, 6858, - 3265, 9161, 6049, 6284, 2600, 5696, 1666, 8160, 6654, - 4, 195, 1359, 4960, 8521, 7181, 8865, 7801, 6138, - 5232, 8342, 5270, 6210, 3360, 8441, 1762, 5683, 8982, - 5151, 6181, 2277, 3989, 2354, 7040, 690, 3818, 4654, - 8998, 6989, 7424, 1876, 2235, 6291, 65, 3846, 5433, - 653, 7328, 3161, 7270, 6440, 3854, 9530, 6576, 4282, - 3869, 6253, 5868, 5179, 2108, 4988, 6966, 2124, 1163, - 395, 7769, 5649, 9625, 8844, 7379, 7826, 6877, 1758, - 1205, 3960, 4313, 5313, 7450, 1611, 5494, 6504, 1330, - 5009, 8603, 3204, 8610, 7225, 9811, 5033, 8244, 8927, - 189, 6660, 2530, 5087, 7128, 633, 3653, 9666, 5288, - 7838, 705, 8757, 5051, 3379, 8676, 8102, 6270, 3427, - 6941, 8350, 2853, 3036, 8139, 4125, 9896, 6792, 341, - 7714, 2950, 6057, 9295, 6404, 6582, 9441, 7456, 9260, - 3870, 7429, 9215, 1715, 4911, 3116, 9023, 6965, 8146, - 8742, 1823, 1052, 3129, 7548, 9310, 5087, 6452, 8523, - 4919, 3317, 6285, 4958, 687, 1721, 2711, 1456, 1993, - 7026, 6960, 2734, 8274, 8550, 6512, 5209, 3870, 1135, - 3771, 8488, 5407, 5874, 5804, 1777, 3653, 2911, 182, - 3402, 9157, 8305, 1357, 7109, 8892, 7404, 6887, 1640, - 7537, 1505, 6301, 101, 7157, 9753, 9025, 8408, 1812, - 7187, 7261, 160, 6522, 7459, 3342, 9803, 9988, 543, - 3555, 999, 2319, 3754, 8264, 3371, 3736, 5469, 9452, - 7184, 2203, 820, 9411, 4278, 4780, 9218, 1947, 3157, - 2226, 4652, 8669, 5426, 7683, 880, 435, 2033, 9292, - 2455, 4770, 2217, 699, 7572, 3934, 5467, 6026, 7062, - 2922, 2864, 4813, 8049, 9857, 6142, 778, 3565, 3686, - 9116, 2737, 9009, 4447, 2474, 971, 265, 2434, 9165, - 1954, 
5853, 4055, 4843, 127, 3125, 509, 2432, 6008, - 5150, 4816, 6842, 9257, 576, 5248, 3779, 6206, 9344, - 9008, 721, 1354, 447, 2489, 5014, 7789, 6532, 232, - 5436, 9948, 4343, 3831, 8334, 5048, 6427, 6482, 9779, - 6169, 9153, 4649, 9096, 6309, 9662, 269, 4722, 5201, - 401, 5744, 3228, 8446, 3133, 3731, 7468, 4532, 1166, - 5876, 2482, 2630, 5613, 1887, 4482, 2576, 2771, 7606, - 4597, 89, 5559, 9787, 1481, 5290, 3655, 322, 166, - 8009, 1493, 7027, 2850, 2006, 4514, 2168, 2174, 9048, - 1786, 4139, 406, 6480, 917, 9722, 7578, 9741, 4917, - 2992, 1489, 1747, 4978, 7936, 2582, 5507, 1532, 5524, - 7826, 3449, 10, 9159, 7860, 3540, 198, 862, 5753, - 3470, 8359, 7581, 3144, 3079, 6434, 7746, 2607, 9147, - 7397, 201, 8081, 4846, 4284, 699, 9972, 9537, 8613, - 9875, 2782, 4871, 244, 9191, 1131, 2811, 1247, 3144, - 8178, 3501, 9452, 4821, 5343, 3, 5004, 5736, 6936, - 7862, 3290, 3383, 5283, 3840, 9625, 5012, 4121, 8759, - 8619, 3041, 5033, 1753, 6166, 6909, 1428, 1761, 1897, - 5070, 6012, 1241, 5625, 8016, 7656, 2936, 7895, 6288, - 9171, 2385, 2390, 2228, 8525, 8334, 9316, 4166, 6851, - 6776, 8997, 5468, 5982, 840, 3981, 4943, 5782, 8333, - 8121, 5911, 8729, 9039, 537, 2752, 6485, 3047, 283, - 5307, 7386, 1100, 7487, 5842, 6557, 5254, 4333, 415, - 9591, 1641, 2548, 8771, 2243, 6011, 7901, 3548, 8089, - 8958, 3344, 8858, 702, 9886, 5874, 7708, 7200, 4618, - 4778, 5665, 4614, 8010, 7456, 3047, 9039, 2044, 9926, - 4002, 1862, 2650, 9698, 4716, 2451, 6247, 8897, 4989, - 8623, 9464, 5599, 7310, 740, 1061, 6357, 3766, 5965, - 4638, 6114, 5529, 2164, 1494, 5729, 4627, 3348, 8109, - 5015, 7896, 3749, 213, 7180, 2156, 7644, 3374, 8920, - 2280, 4385, 5660, 4940, 7376, 6062, 7689, 6701, 3982, - 6731, 4335, 5363, 6281, 7290, 491, 3312, 4385, 9124, - 2888, 314, 2496, 8388, 360, 7969, 9150, 4336, 3714, - 7446, 7899, 6546, 956, 3288, 4036, 5808, 9141, 1394, - 7843, 7906, 1832, 9783, 5676, 1090, 40, 4659, 3502, - 84, 6161, 2144, 5947, 2349, 4766, 6358, 2005, 8455, - 7173, 7646, 4290, 8339, 7908, 943, 4131, 6672, 9870, - 4086, 6590, 205, 6057, 8733, 8600, 2315, 9854, 949, - 6942, 3555, 4782, 9913, 2335, 3741, 105, 9676, 829, - 4513, 5377, 7160, 7815, 5725, 4449, 7984, 7022, 9075, - 2973, 7234, 3301, 4106, 1057, 2796, 5400, 9826, 9608, - 1399, 3273, 7886, 5410, 8781, 1618, 8921, 750, 6541, - 313, 693, 2706, 9062, 82, 1001, 7170, 1044, 8082, - 5581, 7140, 949, 2283, 1698, 7268, 3598, 2618, 7101, - 917, 6363, 1607, 9830, 8474, 3927, 7297, 5514, 3427, - 8343, 6039, 3891, 1347, 5609, 7, 7597, 7807, 2355, - 5270, 382, 5935, 7213, 1197, 9647, 3059, 1828, 1291, - 5826, 9873, 5640, 2784, 8784, 1051, 644, 8752, 810, - 4403, 4357, 4219, 1880, 6353, 2009, 6795, 232, 4527, - 4898]), - values=tensor([1.4008e-01, 7.3938e-01, 7.6639e-01, 5.0164e-01, - 1.7486e-01, 3.2598e-01, 5.6159e-01, 5.5178e-01, - 5.7399e-01, 3.9297e-01, 4.2201e-01, 2.1666e-01, - 8.8345e-04, 9.6154e-01, 8.3073e-01, 6.5757e-01, - 1.6074e-01, 7.8577e-01, 9.7772e-01, 1.0075e-01, - 2.5921e-01, 7.4860e-01, 1.8867e-01, 8.9320e-01, - 8.3174e-01, 6.0226e-01, 1.7683e-01, 3.6553e-01, - 5.7124e-01, 2.0441e-01, 6.4951e-01, 7.0821e-01, - 3.7566e-01, 8.9297e-01, 9.1514e-01, 6.5969e-01, - 2.0172e-01, 6.7599e-01, 2.1470e-01, 9.4784e-01, - 1.3850e-01, 4.2041e-01, 8.9476e-01, 5.7393e-01, - 8.5383e-01, 2.3294e-01, 9.3112e-01, 1.4895e-02, - 7.9940e-01, 1.4394e-01, 7.5610e-01, 7.5678e-01, - 7.1119e-01, 7.0345e-02, 9.2463e-01, 5.5449e-01, - 2.2250e-01, 7.0462e-02, 1.3814e-01, 7.0456e-01, - 6.8619e-01, 5.0257e-01, 1.3518e-01, 8.3724e-01, - 3.2626e-01, 4.5308e-01, 6.4764e-01, 5.6043e-01, - 
1.1045e-01, 7.2989e-01, 3.4674e-01, 6.6224e-02, - 3.6046e-01, 1.9957e-01, 8.2177e-01, 5.4828e-02, - 5.7991e-02, 6.8134e-01, 1.6562e-01, 7.6522e-02, - 2.3683e-01, 7.7761e-01, 7.1370e-01, 4.4629e-01, - 8.3816e-01, 5.1936e-01, 5.6090e-01, 4.0387e-01, - 6.4428e-01, 7.7701e-01, 6.7169e-01, 2.6102e-01, - 7.5529e-03, 5.9419e-01, 7.6740e-01, 3.7976e-01, - 4.2995e-01, 9.5223e-01, 3.4165e-01, 4.6248e-01, - 2.2243e-01, 4.5279e-01, 4.3565e-01, 6.6677e-01, - 6.2478e-01, 4.4391e-01, 7.1044e-01, 5.8594e-01, - 1.8528e-01, 3.9540e-01, 7.3149e-01, 8.0810e-01, - 4.1755e-01, 3.1698e-01, 1.5593e-01, 4.5307e-02, - 1.7898e-01, 1.2264e-01, 5.7366e-01, 8.9976e-02, - 9.5322e-01, 6.5297e-01, 1.6100e-01, 3.2706e-01, - 1.5637e-01, 2.2980e-01, 7.5639e-01, 3.9695e-01, - 4.5194e-01, 8.5862e-01, 4.7966e-01, 1.6712e-01, - 2.8354e-01, 8.5715e-01, 8.2491e-01, 9.3895e-01, - 6.2207e-01, 7.4772e-01, 6.8222e-01, 7.8302e-01, - 3.6293e-01, 9.3747e-01, 4.3535e-01, 3.2649e-01, - 1.2576e-01, 6.7395e-01, 3.8755e-01, 7.3431e-03, - 5.8672e-01, 1.7514e-01, 6.2576e-02, 4.2600e-01, - 8.4353e-01, 4.4830e-01, 4.2652e-01, 3.4003e-01, - 5.6530e-01, 5.0825e-01, 6.3458e-01, 6.3771e-01, - 5.3740e-01, 6.0430e-01, 5.2080e-01, 8.9504e-01, - 8.5600e-01, 9.5176e-01, 1.4018e-02, 8.8048e-01, - 8.5488e-01, 3.8407e-01, 7.3008e-01, 8.9626e-01, - 9.8292e-01, 3.1930e-01, 2.7730e-01, 6.6716e-02, - 1.1928e-01, 3.4153e-01, 4.5995e-02, 6.9950e-01, - 6.8140e-01, 7.1533e-01, 7.6891e-01, 1.3632e-01, - 8.5117e-01, 1.3578e-01, 2.1992e-01, 5.9821e-01, - 6.4608e-01, 4.2725e-01, 2.6707e-01, 5.4533e-01, - 5.1069e-01, 4.1512e-01, 7.8236e-01, 6.3547e-01, - 3.3141e-01, 8.3363e-01, 3.3275e-01, 9.7171e-01, - 3.5198e-01, 3.2371e-01, 5.9398e-01, 4.4829e-01, - 1.6692e-01, 1.2956e-01, 4.9761e-01, 1.4321e-01, - 5.6927e-01, 3.3084e-01, 8.5313e-01, 1.4241e-02, - 1.6810e-01, 6.1849e-01, 5.3128e-01, 3.1383e-01, - 8.0268e-01, 8.3374e-01, 5.7558e-01, 9.4233e-01, - 8.0228e-02, 3.9858e-01, 2.0918e-01, 9.6143e-01, - 1.9960e-01, 3.6105e-01, 7.1222e-01, 2.0743e-01, - 8.8267e-01, 8.0173e-01, 7.1956e-01, 7.6413e-01, - 1.1618e-01, 5.6266e-01, 7.1777e-01, 2.1787e-01, - 3.1249e-01, 1.1366e-01, 7.2133e-01, 5.0897e-01, - 7.7747e-01, 5.1124e-01, 2.3964e-02, 1.7601e-01, - 5.2421e-01, 4.7343e-01, 4.1193e-01, 5.0398e-02, - 9.4604e-01, 4.4435e-01, 8.4770e-01, 8.0073e-01, - 8.2843e-01, 8.4026e-01, 9.5200e-03, 5.5657e-01, - 1.1322e-01, 9.5564e-01, 2.7161e-01, 4.7624e-01, - 5.9371e-01, 5.1445e-01, 9.1385e-01, 7.6815e-01, - 7.7340e-01, 2.4805e-01, 3.4765e-01, 6.3520e-01, - 4.1396e-01, 6.5529e-01, 6.4127e-01, 9.8354e-01, - 9.7802e-01, 9.5844e-02, 2.3629e-01, 2.5244e-02, - 5.8430e-01, 7.2587e-01, 3.9399e-01, 6.5657e-01, - 2.2602e-01, 1.5947e-01, 1.2436e-01, 2.3184e-02, - 4.0334e-01, 3.3395e-01, 7.9580e-01, 9.0331e-01, - 1.4314e-01, 3.6350e-01, 9.8742e-01, 3.9153e-01, - 5.7572e-01, 8.9712e-01, 6.8108e-02, 3.4880e-01, - 6.9105e-02, 9.5150e-01, 1.5018e-01, 7.4890e-01, - 9.4472e-01, 1.9367e-02, 3.8256e-02, 7.8180e-02, - 3.9791e-01, 6.8360e-01, 2.8489e-01, 6.6300e-01, - 8.7842e-02, 9.8383e-01, 9.0831e-01, 9.7155e-02, - 1.2693e-01, 8.7860e-01, 2.6256e-02, 5.6248e-01, - 6.7185e-02, 8.0618e-01, 5.5146e-01, 2.3212e-01, - 6.5006e-01, 5.1569e-01, 7.5629e-01, 5.0736e-01, - 8.6644e-01, 3.2805e-01, 2.3392e-01, 6.9532e-01, - 6.4069e-02, 8.0766e-01, 4.8519e-02, 9.4545e-01, - 9.2322e-01, 9.4059e-01, 8.8406e-01, 1.4705e-01, - 2.0696e-01, 6.4209e-01, 1.7211e-01, 7.7366e-01, - 8.8312e-01, 4.1724e-01, 5.9560e-01, 1.0047e-02, - 8.0776e-01, 8.5045e-01, 2.7704e-02, 2.9548e-01, - 8.7383e-01, 1.2672e-01, 6.2008e-01, 7.8048e-01, - 
5.4722e-01, 3.7268e-02, 7.6316e-01, 5.4004e-01, - 8.2708e-01, 7.7993e-01, 5.1032e-01, 5.7951e-01, - 6.7007e-01, 6.9343e-01, 9.1925e-01, 4.3266e-01, - 8.2137e-01, 9.5167e-01, 1.3972e-01, 5.4549e-01, - 9.6649e-01, 4.2568e-01, 3.9725e-01, 3.5461e-01, - 7.7180e-01, 7.2418e-01, 7.8794e-01, 8.7493e-01, - 6.5834e-01, 9.4314e-01, 7.9979e-01, 6.5380e-01, - 3.7019e-01, 8.1362e-01, 6.5310e-01, 5.5138e-01, - 4.2526e-02, 9.5295e-01, 6.3106e-01, 6.6372e-01, - 6.5914e-01, 2.1948e-01, 6.8732e-01, 6.8666e-01, - 2.3578e-01, 6.7994e-01, 9.0510e-01, 8.3924e-01, - 4.9513e-01, 2.9955e-01, 1.8215e-02, 9.1885e-01, - 1.7408e-01, 1.8051e-01, 2.2848e-01, 6.6964e-02, - 3.9377e-01, 4.2686e-02, 1.3510e-01, 5.1147e-01, - 1.7646e-01, 6.7097e-01, 1.5327e-01, 8.9006e-01, - 2.5804e-01, 6.4589e-01, 4.3026e-01, 8.9192e-01, - 5.0256e-01, 5.1282e-01, 4.1308e-01, 2.4295e-01, - 3.1734e-02, 3.6877e-02, 2.2299e-02, 2.9965e-01, - 2.9078e-01, 3.8183e-01, 5.2222e-01, 5.7421e-01, - 1.1717e-01, 6.3181e-01, 5.8114e-01, 7.2116e-01, - 4.4703e-01, 4.2801e-01, 9.0478e-01, 3.9503e-01, - 1.8289e-01, 9.8264e-01, 7.7652e-01, 8.1865e-01, - 4.5727e-01, 3.9744e-01, 7.5562e-01, 1.2155e-01, - 9.7382e-02, 2.0197e-01, 4.8545e-01, 9.4925e-01, - 2.6545e-01, 3.5580e-01, 3.0269e-01, 1.1651e-02, - 4.1913e-01, 8.4954e-01, 1.1003e-01, 7.3324e-01, - 3.0650e-02, 8.6838e-01, 4.9323e-01, 8.1522e-01, - 3.4304e-01, 3.0905e-01, 3.9779e-01, 5.7834e-01, - 5.1821e-01, 5.0984e-01, 7.4684e-01, 5.0277e-01, - 5.7314e-01, 2.2013e-02, 1.2640e-01, 1.2089e-01, - 9.0576e-01, 5.1526e-01, 8.2341e-01, 1.4643e-02, - 9.2647e-01, 1.3228e-01, 6.2683e-01, 7.8975e-01, - 4.4056e-01, 5.0657e-01, 4.9976e-01, 2.1161e-01, - 5.5244e-01, 3.4723e-01, 2.1648e-01, 5.6745e-01, - 2.5556e-01, 8.8321e-03, 1.0900e-02, 2.1680e-01, - 8.8063e-01, 9.1328e-01, 3.2536e-02, 4.9694e-01, - 2.7345e-01, 7.4327e-01, 7.2809e-01, 6.6353e-01, - 5.3136e-01, 7.5753e-01, 4.8998e-01, 7.1511e-01, - 7.0253e-01, 6.4707e-01, 3.7836e-02, 7.7134e-01, - 1.5308e-02, 2.6444e-01, 4.6545e-01, 1.3966e-01, - 9.6812e-01, 3.7718e-01, 9.6591e-01, 4.2873e-01, - 7.8220e-01, 3.8017e-01, 3.5602e-01, 4.0939e-01, - 7.6208e-01, 1.8022e-01, 5.5876e-02, 6.7166e-01, - 3.8980e-01, 7.0437e-01, 9.9344e-01, 5.9567e-01, - 6.1296e-01, 8.9698e-01, 9.1062e-01, 9.2650e-01, - 9.1190e-01, 8.9253e-01, 4.9180e-01, 7.4096e-01, - 2.4088e-01, 1.0479e-01, 7.7261e-01, 4.3386e-01, - 3.9329e-01, 5.4304e-01, 3.6663e-01, 8.5654e-02, - 9.5144e-01, 1.5547e-01, 7.8376e-01, 1.1481e-01, - 5.2874e-01, 4.6678e-02, 1.6680e-01, 9.6803e-01, - 8.3213e-01, 5.9813e-01, 7.0737e-01, 8.3350e-01, - 3.7380e-01, 4.4355e-01, 8.7051e-01, 5.7225e-01, - 4.9317e-01, 3.6354e-01, 9.2835e-01, 5.9679e-01, - 1.3281e-01, 2.1930e-01, 7.9322e-01, 3.0523e-01, - 5.8165e-01, 7.5390e-01, 1.2114e-01, 5.3411e-02, - 4.1585e-01, 9.6187e-01, 4.2424e-01, 1.3533e-01, - 3.7712e-01, 1.8261e-01, 6.8088e-01, 1.8639e-01, - 5.1967e-01, 8.0666e-01, 5.0981e-01, 9.7923e-02, - 1.7373e-02, 5.5994e-01, 3.3362e-01, 2.4293e-01, - 1.6934e-01, 2.2530e-01, 9.7352e-01, 4.9510e-01, - 2.1393e-01, 5.8022e-01, 3.8276e-02, 8.7272e-02, - 6.8543e-01, 3.2756e-02, 5.6718e-01, 3.9117e-01, - 9.7834e-01, 6.5395e-01, 8.5529e-01, 4.8972e-01, - 1.9190e-01, 1.9049e-01, 4.8019e-01, 7.2147e-01, - 1.2100e-01, 3.2180e-01, 5.1040e-03, 7.9651e-01, - 2.7309e-01, 6.6645e-01, 2.9896e-01, 5.1748e-01, - 6.1241e-01, 5.6827e-01, 5.9954e-01, 6.7815e-01, - 6.2197e-01, 1.1702e-01, 4.1684e-01, 7.0624e-01, - 7.6888e-01, 6.1216e-01, 1.1353e-01, 9.6720e-01, - 1.2108e-01, 5.9995e-01, 1.8773e-01, 2.3973e-01, - 5.9648e-01, 1.6246e-01, 6.1599e-02, 3.9480e-01, - 
5.0531e-01, 9.9736e-01, 2.5833e-01, 8.2165e-01, - 5.5800e-01, 6.4798e-01, 9.0038e-01, 9.9833e-01, - 9.2312e-01, 2.6677e-01, 6.5352e-01, 4.6482e-01, - 1.2201e-01, 6.7988e-01, 2.7359e-01, 5.8169e-01, - 8.4593e-01, 2.9999e-01, 1.1410e-01, 3.6589e-01, - 6.5950e-01, 8.4661e-01, 3.7005e-01, 6.3599e-01, - 8.8603e-01, 4.0464e-01, 1.8286e-01, 5.2775e-02, - 8.4989e-01, 3.3061e-01, 8.2236e-01, 4.3585e-01, - 7.4979e-01, 8.1839e-01, 2.9331e-01, 7.1902e-02, - 8.5449e-01, 2.4751e-01, 7.1396e-02, 1.2001e-01, - 9.4854e-01, 6.6576e-01, 9.7865e-01, 1.0903e-01, - 5.1127e-01, 1.3729e-01, 4.3680e-01, 9.3247e-01, - 5.1767e-01, 3.0724e-02, 8.0497e-02, 6.4769e-02, - 7.6391e-01, 2.0889e-02, 4.1965e-01, 9.0319e-01, - 4.3557e-01, 9.5238e-01, 4.0303e-02, 3.6623e-01, - 3.2743e-01, 1.2845e-01, 6.4784e-01, 8.7231e-01, - 6.8798e-01, 3.1354e-01, 5.3644e-01, 9.6932e-01, - 1.7454e-01, 2.1021e-01, 1.4568e-01, 8.3907e-01, - 6.2260e-01, 9.3888e-01, 3.9588e-01, 8.7395e-01, - 5.9658e-01, 9.8182e-02, 5.6640e-01, 6.3829e-02, - 6.9286e-01, 7.5561e-01, 1.5461e-01, 3.7725e-02, - 1.9831e-01, 2.6020e-01, 8.3708e-01, 1.0480e-01, - 9.0558e-01, 6.1646e-01, 7.0305e-01, 1.9392e-01, - 4.0235e-01, 2.5149e-02, 9.1578e-02, 5.7882e-01, - 2.5579e-01, 4.0794e-02, 7.2276e-01, 1.6891e-01, - 2.5039e-01, 9.9730e-01, 7.8170e-01, 6.1608e-01, - 7.7103e-01, 2.5630e-01, 1.4927e-01, 2.7998e-01, - 3.0817e-01, 7.3242e-02, 1.2753e-01, 4.1496e-01, - 5.8219e-01, 5.8755e-01, 9.8146e-01, 6.2375e-01, - 8.5660e-01, 2.1402e-01, 5.1545e-01, 1.2679e-01, - 3.2732e-01, 1.7006e-01, 4.3828e-01, 1.6170e-01, - 1.0399e-01, 5.3933e-01, 7.8541e-01, 1.1931e-01, - 1.5504e-01, 7.8948e-01, 1.5878e-01, 7.4734e-01, - 2.3345e-01, 2.3097e-01, 1.4671e-01, 3.7307e-01, - 8.0431e-01, 4.5794e-01, 9.2994e-01, 8.9926e-01, - 8.4547e-01, 5.0121e-02, 7.2131e-01, 6.2424e-01, - 6.3904e-01, 4.3546e-01, 8.0833e-01, 4.9642e-01, - 5.7037e-01, 3.9698e-01, 4.3301e-02, 4.0795e-01, - 6.9428e-01, 9.9629e-01, 9.0683e-01, 5.4597e-01, - 5.7624e-01, 9.9789e-01, 2.4283e-01, 5.9102e-01, - 3.8101e-01, 7.2091e-03, 6.9826e-01, 6.1377e-01, - 6.0709e-01, 4.9390e-01, 6.1114e-01, 7.2574e-01, - 8.2221e-01, 5.4550e-01, 3.0011e-01, 4.4517e-01, - 4.5758e-02, 4.6015e-01, 3.4893e-02, 2.0905e-01, - 7.1053e-01, 8.6429e-01, 1.4344e-01, 5.8718e-01, - 1.3326e-01, 6.4655e-01, 1.7744e-01, 7.3568e-01, - 4.4107e-01, 3.5360e-01, 7.8895e-01, 1.3426e-02, - 2.5589e-01, 8.9759e-01, 1.8598e-01, 1.1031e-01, - 6.4082e-02, 1.4471e-01, 6.4713e-01, 5.3510e-01, - 3.5916e-01, 2.3596e-01, 3.7914e-01, 4.5283e-01, - 4.0277e-01, 8.3793e-01, 3.6913e-01, 8.0491e-01, - 8.0560e-01, 8.3787e-01, 3.2988e-01, 2.7062e-01, - 5.2644e-01, 8.0191e-01, 3.0417e-01, 8.0992e-01, - 3.7054e-01, 6.5171e-01, 8.9851e-01, 7.9938e-01, - 7.9593e-01, 4.1200e-01, 7.2346e-01, 8.8886e-01, - 7.7392e-01, 3.0164e-01, 4.8903e-01, 7.0959e-01, - 2.3055e-04, 5.5567e-01, 3.2476e-02, 4.6940e-01, - 9.5992e-01, 3.3002e-01, 1.9968e-01, 3.6916e-01, - 9.3384e-01, 4.4547e-01, 8.4545e-01, 7.0652e-01, - 2.5484e-01, 6.6093e-01, 1.0142e-01, 4.0036e-01, - 8.8027e-02, 7.0693e-01, 6.1292e-01, 3.5279e-02, - 1.6464e-01, 7.4166e-01, 7.4512e-01, 3.1187e-01, - 9.3827e-01, 8.1291e-01, 6.8774e-01, 1.6535e-01, - 8.2353e-01, 4.4462e-01, 6.2390e-01, 2.5665e-01, - 2.4823e-01, 6.5084e-01, 2.8892e-01, 8.3557e-01, - 2.6616e-01, 8.8256e-01, 3.9677e-01, 7.0336e-01, - 8.3626e-01, 8.4230e-01, 1.2696e-01, 5.4260e-01, - 4.5014e-01, 1.2684e-01, 4.1101e-01, 4.8496e-01, - 7.6136e-01, 5.3988e-01, 4.7573e-01, 1.6785e-01, - 8.8662e-01, 6.3235e-01, 2.9589e-01, 2.8836e-01, - 9.5162e-01, 5.4258e-01, 2.7719e-01, 7.2819e-01, - 
4.5646e-02, 7.4687e-01, 6.9970e-01, 6.6663e-01, - 4.4353e-01, 9.2703e-01, 1.7143e-01, 9.9752e-01, - 1.6120e-01, 9.1760e-02, 7.4539e-02, 6.2124e-01, - 3.9172e-01, 8.8093e-01, 8.8278e-01, 9.1996e-01, - 9.9713e-01, 4.7916e-01, 8.6919e-01, 9.8249e-01, - 8.6668e-01, 3.9161e-01, 6.4609e-01, 5.0516e-01, - 6.3812e-02, 4.1956e-01, 4.4304e-01, 5.3391e-01, - 9.4156e-01, 4.0458e-01, 2.9747e-01, 1.0297e-01, - 2.1881e-01, 5.0346e-01, 5.5620e-01, 1.2242e-01, - 9.4646e-01, 4.2415e-01, 1.2648e-01, 8.3879e-01, - 2.1895e-01, 7.2594e-01, 9.5334e-01, 4.1030e-01, - 6.5805e-01, 6.1371e-01, 6.3829e-01, 6.4855e-01, - 1.9689e-01, 7.6433e-01, 1.4868e-01, 1.2936e-01, - 1.2645e-01, 8.9930e-01, 1.6824e-01, 9.4670e-01, - 4.8028e-01, 6.9663e-01, 4.2572e-01, 8.7528e-01, - 8.1359e-02, 9.6745e-01, 2.7830e-01, 7.5427e-01, - 5.4086e-02, 9.3150e-01, 6.8940e-01, 8.9446e-01, - 4.2269e-01, 1.5823e-02, 6.6296e-01, 8.5098e-01, - 8.8125e-01, 8.8539e-01, 6.4149e-01, 9.0541e-01, - 8.6570e-01, 1.9544e-01, 4.7643e-01, 8.5814e-01]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), + col_indices=tensor([3175, 1540, 6513, 4566, 9706, 3242, 7522, 361, 3563, + 273, 8050, 6972, 5246, 100, 2674, 5918, 3629, 808, + 6317, 2665, 3236, 7680, 4047, 5897, 1768, 5781, 8933, + 8413, 7478, 8640, 5353, 4488, 7437, 3716, 4046, 1102, + 6131, 2784, 5612, 6734, 6293, 813, 8222, 4409, 7568, + 7734, 4823, 4746, 71, 9732, 5731, 7539, 5376, 3975, + 4034, 5323, 3781, 4198, 6205, 3448, 5920, 4554, 964, + 2149, 3775, 4363, 7665, 7615, 1360, 740, 9444, 8107, + 1702, 5055, 4887, 338, 8496, 5258, 6306, 4365, 8779, + 3316, 6271, 7936, 5465, 5927, 2341, 8746, 8614, 4168, + 7453, 8302, 1818, 3772, 900, 570, 1621, 1384, 1313, + 5863, 7529, 2013, 14, 7644, 4866, 5872, 4394, 6186, + 7063, 8838, 961, 1908, 8272, 1397, 5498, 6793, 4939, + 7488, 3334, 7992, 2581, 6595, 9145, 5581, 4949, 2140, + 6797, 414, 1120, 5151, 8169, 7479, 5174, 1884, 9527, + 2164, 3682, 8813, 2069, 8178, 7946, 3103, 3936, 5349, + 2278, 6902, 2920, 1610, 3303, 3215, 9428, 1830, 2118, + 5807, 1813, 6966, 2320, 7242, 2061, 1142, 8325, 1748, + 6423, 3895, 9860, 3334, 1571, 7560, 9529, 2255, 5990, + 7873, 1082, 5084, 4882, 8756, 2595, 3740, 8442, 4906, + 4146, 3172, 76, 1682, 3498, 5746, 4339, 3299, 2674, + 4896, 7065, 1329, 4780, 8089, 9997, 1942, 3826, 4131, + 731, 9928, 4711, 108, 4149, 1842, 8173, 1747, 9110, + 443, 1885, 4298, 1140, 4797, 448, 4550, 429, 8474, + 4481, 6412, 6907, 8449, 3203, 1144, 8022, 5563, 2882, + 5617, 6692, 1347, 1909, 5954, 8651, 3933, 1838, 8274, + 2192, 3777, 9060, 791, 4605, 3528, 4962, 9087, 4984, + 2397, 5548, 7709, 3017, 2309, 3655, 6073, 8413, 9630, + 431, 1679, 7855, 1854, 2512, 7892, 7672, 3225, 8437, + 1273, 8525, 6002, 4821, 6282, 236, 2495, 4787, 7877, + 8952, 9372, 4533, 4631, 5234, 8335, 9577, 6036, 98, + 2704, 5610, 6864, 5127, 6489, 9983, 9754, 5784, 4497, + 7982, 8077, 5919, 4969, 6009, 6638, 5854, 2056, 7764, + 3940, 5182, 6794, 6660, 8586, 4187, 8042, 2416, 8812, + 3619, 6316, 9827, 3289, 3690, 1654, 2607, 451, 2745, + 5265, 4289, 6960, 1869, 7932, 5618, 4964, 3986, 215, + 9920, 528, 1074, 6998, 9786, 9451, 4860, 7150, 5317, + 2922, 1331, 9263, 8341, 4258, 5180, 5269, 5128, 4740, + 9937, 9856, 7158, 6339, 836, 9058, 8365, 1840, 9851, + 5665, 7487, 3732, 179, 4366, 2315, 9267, 1373, 2275, + 7848, 5845, 8794, 6211, 6941, 2, 804, 8730, 5269, + 7518, 2707, 7073, 3598, 7548, 8402, 55, 4035, 7643, + 9945, 7893, 5968, 8491, 1584, 8889, 7265, 5390, 8665, + 4508, 3535, 723, 893, 7985, 4490, 7267, 4951, 8120, + 1353, 6368, 4704, 5448, 5441, 8473, 9294, 3369, 
4289, + 3173, 9667, 8648, 3368, 3676, 7114, 9123, 9879, 2618, + 2041, 2076, 9915, 2059, 7375, 4830, 5178, 3566, 6066, + 9528, 432, 3275, 9207, 9228, 898, 9740, 704, 7175, + 1810, 4839, 9301, 7082, 7045, 9236, 5459, 6824, 2386, + 9099, 2425, 6745, 8533, 9747, 7091, 7333, 6526, 5056, + 8568, 3601, 9200, 611, 9078, 7305, 7509, 8202, 2800, + 676, 4144, 2431, 6051, 523, 4438, 7508, 8507, 4826, + 9235, 9404, 1554, 6190, 1800, 8838, 8616, 9273, 1919, + 210, 323, 3095, 2911, 1383, 7516, 3916, 6756, 186, + 1597, 4025, 9811, 1346, 7942, 8460, 4239, 3491, 9614, + 1574, 3026, 1308, 2921, 9065, 7897, 1949, 5662, 3969, + 6445, 2108, 3419, 7470, 7688, 9282, 1284, 1190, 7548, + 5183, 1643, 8925, 2434, 1016, 3356, 4259, 736, 9699, + 4586, 9185, 7689, 8664, 6332, 6646, 6512, 3621, 87, + 6668, 2483, 2434, 1838, 4859, 4139, 6036, 8716, 5847, + 9068, 1235, 5372, 1971, 6227, 1556, 4067, 4084, 9394, + 8499, 8204, 8364, 6391, 8186, 3868, 7454, 4389, 3681, + 5817, 3764, 3744, 6962, 5091, 9055, 6773, 4867, 6944, + 2264, 652, 3256, 2751, 8863, 7674, 2641, 9240, 2245, + 9330, 8024, 9843, 4610, 82, 200, 8852, 5519, 2948, + 1600, 4032, 1320, 5925, 2330, 9335, 8707, 6761, 5646, + 813, 1857, 537, 7443, 5765, 3517, 7296, 7246, 6140, + 7043, 8305, 6878, 7142, 1532, 5166, 1412, 718, 9870, + 5087, 452, 3492, 6284, 6960, 7980, 454, 5109, 9800, + 5047, 2964, 1824, 5078, 6124, 9571, 9534, 2567, 8828, + 4193, 1120, 3726, 1508, 8125, 1525, 2609, 983, 9289, + 6898, 5932, 8744, 6717, 5439, 4405, 853, 7405, 3237, + 2345, 6750, 8221, 667, 5951, 632, 9570, 2101, 6091, + 3022, 3318, 4966, 5463, 4525, 9617, 5713, 8112, 7764, + 1714, 3834, 6463, 7656, 4347, 9414, 6948, 9428, 7757, + 709, 2683, 5160, 5955, 5530, 6376, 3284, 4446, 5698, + 2275, 2092, 6675, 5841, 5301, 6848, 9042, 3714, 5714, + 3313, 8575, 5834, 1116, 3984, 2225, 8925, 9909, 7358, + 9350, 6525, 5220, 7818, 1339, 6950, 9289, 9181, 9998, + 7845, 6947, 9177, 8578, 4463, 1874, 70, 2789, 9195, + 4686, 2356, 4115, 1229, 3411, 9168, 3818, 4667, 6830, + 6742, 8796, 4708, 6334, 9212, 955, 1317, 5763, 7646, + 8855, 9864, 5649, 4410, 9725, 7835, 2594, 4904, 9545, + 1567, 3913, 9838, 887, 929, 6237, 8603, 266, 8794, + 4699, 6401, 8344, 7226, 8586, 183, 9072, 8396, 1293, + 7009, 8570, 6640, 2705, 4187, 2146, 1943, 5404, 4534, + 3765, 8271, 1936, 8038, 6160, 6410, 5531, 6130, 449, + 6410, 825, 5772, 7843, 6366, 8489, 3632, 1857, 6681, + 6911, 6206, 9698, 5731, 2691, 7020, 9437, 9086, 7250, + 9133, 8448, 9615, 9805, 7718, 7430, 8451, 617, 7119, + 3608, 2957, 8156, 2628, 381, 2102, 5147, 1812, 7632, + 6080, 3801, 6557, 3664, 7115, 4747, 9946, 933, 9043, + 1209, 8903, 2470, 7079, 2590, 4242, 1267, 2310, 1880, + 1533, 8678, 2879, 5347, 3885, 2968, 7456, 8838, 419, + 3883, 1590, 8133, 338, 7811, 610, 5462, 8096, 5384, + 2656, 3961, 9755, 4315, 8580, 1978, 7459, 2728, 6364, + 3022, 540, 8591, 2436, 6393, 2785, 2076, 2351, 7720, + 3071, 210, 3880, 3780, 5455, 4217, 6253, 1690, 7922, + 8344, 3819, 673, 5739, 9352, 339, 2786, 4845, 1565, + 3191, 9970, 5600, 5489, 5182, 6938, 5244, 6170, 2279, + 5161, 5371, 3300, 45, 6679, 8740, 7633, 4511, 1112, + 1547, 359, 8586, 849, 9883, 8268, 2875, 7310, 8225, + 1860, 3766, 6124, 7431, 5973, 1224, 8189, 1700, 9843, + 2201, 8620, 3055, 1358, 7636, 5811, 5697, 3368, 2604, + 2311, 8744, 5512, 4770, 1238, 7759, 9050, 3348, 7125, + 1114, 7201, 7256, 5261, 6468, 3791, 4716, 1684, 3831, + 2665, 4866, 2193, 1393, 7781, 4603, 2849, 4725, 1778, + 9535, 4976, 3128, 4153, 976, 3584, 9384, 4201, 3066, + 3919, 5286, 1666, 9720, 5671, 8474, 8596, 6477, 9376, + 8643, 5995, 
856, 6368, 3119, 9235, 7286, 4524, 7608, + 8305, 1316, 3514, 8323, 3763, 5982, 5171, 9325, 2051, + 3757]), + values=tensor([5.4503e-02, 2.9508e-01, 2.7621e-01, 5.9220e-01, + 6.7599e-01, 6.9875e-01, 7.2193e-02, 4.2622e-01, + 4.4064e-01, 4.9415e-01, 4.8448e-01, 4.6026e-01, + 8.3385e-01, 6.8127e-01, 7.4836e-01, 9.8446e-01, + 4.5546e-01, 1.1369e-01, 7.1924e-01, 1.0264e-01, + 9.0307e-01, 6.5371e-02, 4.6885e-02, 6.4486e-01, + 7.8103e-01, 2.8906e-01, 4.3815e-01, 3.8670e-01, + 4.1476e-01, 1.4529e-01, 4.5266e-02, 8.7285e-01, + 8.3886e-01, 5.0968e-01, 1.9297e-01, 8.7115e-01, + 4.2403e-01, 1.7959e-01, 7.8896e-01, 8.3163e-01, + 2.2324e-01, 2.8136e-01, 1.9157e-02, 7.8786e-01, + 2.7574e-01, 1.6824e-01, 8.9669e-01, 2.4513e-01, + 5.8585e-01, 3.7827e-01, 2.4067e-01, 5.7494e-01, + 7.5649e-01, 5.4031e-01, 1.9831e-01, 2.0146e-01, + 3.5111e-03, 1.4887e-01, 9.3482e-01, 8.9867e-01, + 7.3926e-01, 8.9432e-01, 7.7800e-01, 7.8593e-02, + 9.0410e-01, 1.3148e-01, 2.7506e-02, 2.8367e-01, + 3.7537e-01, 4.9944e-01, 9.2122e-01, 2.8280e-01, + 1.5841e-01, 6.6171e-01, 2.7753e-01, 7.5495e-01, + 7.8495e-01, 4.0801e-01, 5.1023e-02, 6.5582e-01, + 8.4918e-01, 4.8003e-02, 3.5027e-01, 9.2382e-01, + 4.3491e-01, 2.8400e-01, 4.9820e-01, 4.0135e-01, + 9.0951e-01, 8.1196e-01, 1.4390e-01, 3.9182e-01, + 9.1794e-01, 4.1848e-01, 8.6036e-01, 6.1219e-01, + 8.7067e-01, 8.2945e-01, 3.9732e-01, 7.8015e-01, + 8.9715e-02, 8.2485e-02, 3.5463e-01, 9.2038e-01, + 3.3054e-01, 6.8980e-01, 8.3428e-01, 6.7282e-01, + 9.0467e-01, 9.8824e-01, 2.3744e-01, 7.6530e-01, + 5.0061e-01, 1.5883e-01, 5.3783e-01, 7.9345e-01, + 3.7436e-01, 2.0235e-01, 7.7143e-01, 5.2818e-01, + 5.0000e-01, 1.1490e-02, 9.7408e-01, 2.0383e-01, + 6.8759e-02, 4.2208e-01, 7.0805e-01, 3.5602e-01, + 1.4045e-01, 1.8772e-01, 8.2939e-01, 2.6640e-01, + 7.5430e-01, 6.3779e-01, 9.8984e-01, 3.1603e-03, + 6.1629e-01, 4.2752e-01, 4.2494e-01, 3.2776e-01, + 7.0716e-01, 6.0492e-01, 2.4134e-01, 9.9805e-01, + 7.8767e-01, 9.6566e-01, 1.7115e-01, 7.1157e-02, + 4.0609e-01, 9.1913e-01, 4.0558e-01, 7.9724e-01, + 9.6047e-01, 2.4534e-01, 6.7326e-01, 7.3496e-01, + 2.5675e-01, 1.7448e-01, 6.2499e-02, 5.6949e-01, + 7.5770e-01, 3.9893e-01, 5.3252e-01, 4.1528e-01, + 3.5445e-01, 9.4052e-01, 9.9915e-01, 3.2511e-01, + 2.7636e-01, 9.6422e-01, 1.8532e-02, 4.8863e-01, + 1.6115e-01, 4.8734e-01, 4.4591e-01, 1.2811e-01, + 5.5702e-01, 6.3130e-01, 1.8993e-01, 5.4511e-01, + 2.1032e-01, 6.5660e-01, 2.6576e-01, 8.1915e-01, + 5.9613e-01, 3.3776e-01, 6.2370e-01, 8.7516e-01, + 6.3332e-01, 9.1021e-01, 5.9864e-01, 2.2683e-01, + 3.7532e-01, 3.4357e-01, 9.2278e-01, 1.0352e-01, + 7.7095e-01, 3.3999e-01, 3.1460e-01, 9.0103e-01, + 3.5506e-01, 8.8805e-01, 5.7371e-01, 2.3930e-01, + 8.7840e-01, 6.0017e-01, 4.8130e-01, 4.4910e-01, + 5.3282e-01, 8.6422e-02, 2.0105e-01, 4.1225e-01, + 3.6155e-01, 6.2255e-01, 1.8640e-01, 8.2609e-01, + 1.7861e-01, 4.5248e-01, 8.8949e-01, 9.6998e-01, + 2.6238e-01, 6.4922e-01, 6.7744e-01, 8.7497e-01, + 5.2727e-02, 2.5528e-03, 5.5124e-01, 8.9615e-01, + 7.7634e-01, 5.0560e-01, 6.4556e-01, 3.4657e-01, + 3.1546e-01, 2.4480e-02, 6.3994e-01, 1.1798e-01, + 7.4735e-01, 7.7755e-01, 4.2687e-01, 2.0743e-01, + 4.1103e-01, 4.7313e-01, 6.6535e-01, 1.5899e-01, + 5.0196e-01, 1.5159e-01, 1.8033e-01, 4.0226e-01, + 4.0348e-01, 5.4168e-01, 4.0167e-01, 1.6018e-01, + 2.5039e-01, 3.4686e-01, 2.5177e-01, 4.9281e-01, + 5.0762e-01, 8.7205e-01, 5.2584e-02, 7.4358e-01, + 3.1876e-01, 2.8162e-01, 6.8347e-01, 8.9200e-01, + 2.3220e-01, 1.1302e-01, 4.9947e-01, 3.5588e-01, + 6.1641e-01, 2.4231e-01, 6.5140e-02, 4.4885e-02, + 3.2197e-01, 6.1080e-01, 
7.8795e-01, 2.6929e-02, + 5.8856e-01, 2.4904e-01, 8.5655e-01, 8.7065e-01, + 2.1587e-01, 7.8035e-01, 9.2913e-01, 2.1233e-01, + 6.0794e-02, 7.7621e-01, 5.9383e-01, 7.7282e-01, + 2.1097e-01, 4.5931e-01, 1.2216e-01, 5.5394e-01, + 8.2536e-01, 9.4006e-01, 6.9903e-01, 2.3400e-01, + 1.6489e-01, 9.9877e-01, 3.9160e-03, 9.6684e-01, + 6.2591e-01, 2.5349e-01, 8.7857e-01, 4.9299e-01, + 2.8608e-01, 9.5114e-01, 1.4473e-01, 6.2640e-01, + 4.5802e-01, 5.0106e-02, 6.0613e-01, 8.2241e-02, + 7.2741e-01, 7.5131e-01, 8.4107e-01, 2.1466e-01, + 8.8673e-01, 8.0627e-01, 7.4367e-01, 8.8514e-01, + 9.2521e-01, 6.5036e-01, 6.8984e-01, 2.2250e-01, + 1.8633e-01, 6.5714e-01, 9.9674e-01, 9.5767e-01, + 9.8742e-01, 1.4499e-01, 6.8279e-01, 8.4327e-01, + 3.1676e-01, 9.4538e-01, 2.0070e-02, 7.2806e-01, + 2.1770e-01, 8.0038e-01, 1.4197e-02, 2.4290e-01, + 5.8603e-01, 8.5921e-01, 3.7803e-02, 5.9046e-01, + 1.3055e-03, 5.8856e-01, 3.0292e-01, 9.3443e-02, + 9.6541e-01, 6.2831e-01, 4.3760e-01, 1.1317e-01, + 2.1064e-01, 5.8110e-01, 7.5660e-01, 9.2549e-01, + 1.3727e-02, 1.6075e-02, 3.0759e-01, 3.7790e-01, + 2.1176e-01, 1.3578e-01, 4.7314e-01, 1.6872e-01, + 2.7121e-01, 4.5927e-01, 2.9240e-01, 7.4103e-01, + 3.2311e-01, 5.0994e-01, 5.8812e-01, 1.0297e-01, + 9.3280e-01, 1.1119e-02, 2.3125e-01, 9.4903e-01, + 9.5077e-01, 6.3578e-01, 3.0323e-01, 9.1593e-01, + 6.8375e-01, 7.8254e-01, 4.1770e-01, 8.4649e-01, + 3.4911e-01, 1.4315e-02, 4.6432e-01, 1.0495e-01, + 8.9960e-01, 5.6481e-01, 9.8227e-01, 9.4531e-04, + 1.6178e-01, 7.1270e-01, 9.0622e-01, 2.5614e-02, + 5.3965e-02, 6.0181e-01, 4.7567e-01, 7.3600e-01, + 1.4436e-01, 9.4179e-01, 8.0074e-01, 9.8982e-01, + 8.1912e-01, 5.9117e-01, 1.4834e-01, 8.6032e-01, + 6.1336e-01, 7.8548e-01, 7.9831e-01, 5.4864e-02, + 5.7972e-01, 8.9770e-01, 9.8146e-02, 1.3538e-01, + 2.5634e-03, 9.7743e-01, 6.5789e-01, 4.9119e-01, + 1.4415e-01, 4.5435e-01, 9.3861e-01, 2.4903e-01, + 9.8609e-01, 1.0273e-01, 6.1532e-02, 8.6587e-01, + 9.9411e-01, 6.9395e-02, 5.1601e-01, 8.8388e-01, + 5.1906e-01, 3.4757e-01, 7.0502e-01, 9.7708e-01, + 6.1776e-01, 7.0773e-01, 5.8744e-02, 7.0159e-01, + 6.4261e-01, 1.6001e-01, 1.0469e-02, 2.6232e-01, + 2.3191e-01, 3.1641e-01, 5.8909e-01, 9.7064e-02, + 8.2228e-01, 2.6238e-01, 6.7340e-01, 6.6894e-01, + 1.2005e-02, 3.5561e-01, 8.9801e-01, 6.5307e-01, + 2.9558e-01, 8.5369e-01, 6.1529e-02, 2.8934e-01, + 9.6447e-01, 5.9228e-01, 5.6285e-02, 4.7556e-01, + 8.7048e-01, 3.6616e-01, 4.2623e-01, 9.2599e-01, + 6.2658e-01, 6.7118e-01, 6.5435e-01, 5.6091e-01, + 5.3675e-02, 3.3015e-01, 4.7517e-01, 9.0024e-01, + 8.8494e-02, 8.2825e-01, 8.8778e-01, 8.7782e-01, + 5.8637e-01, 6.8125e-01, 5.4448e-01, 8.7589e-02, + 2.4761e-01, 7.0949e-01, 6.1296e-01, 5.8345e-01, + 2.3981e-01, 9.4679e-01, 5.9791e-01, 5.4437e-01, + 9.2743e-01, 5.6804e-01, 2.0049e-01, 6.7777e-01, + 9.3903e-01, 3.3628e-01, 7.2323e-01, 1.5931e-01, + 5.0401e-01, 3.4850e-01, 3.1155e-01, 9.9494e-01, + 3.4038e-01, 5.7178e-01, 7.0324e-01, 3.0201e-01, + 1.0997e-01, 8.8095e-01, 3.8991e-01, 2.3953e-01, + 8.6688e-01, 1.8669e-01, 2.9318e-01, 8.1412e-01, + 7.5600e-01, 3.5357e-01, 5.3145e-01, 5.3373e-01, + 8.5296e-01, 7.1799e-01, 4.4404e-01, 1.4435e-01, + 4.3572e-01, 1.5025e-01, 1.9665e-01, 6.8243e-01, + 7.0719e-01, 4.6621e-01, 9.9873e-01, 1.9092e-01, + 3.4845e-01, 3.2613e-01, 1.3621e-01, 3.7010e-03, + 2.5735e-01, 6.3842e-01, 7.9218e-01, 5.3496e-01, + 8.3599e-01, 8.7940e-01, 9.2095e-01, 4.8514e-01, + 4.7299e-02, 5.9454e-01, 8.7756e-01, 8.1858e-01, + 3.0753e-01, 1.0135e-01, 8.9628e-01, 8.7531e-01, + 2.4338e-01, 7.6256e-01, 2.7974e-01, 7.3112e-01, + 4.5748e-01, 9.6845e-01, 
4.6279e-01, 2.2181e-01, + 5.2307e-02, 3.8183e-01, 8.9776e-01, 7.9512e-01, + 5.4519e-01, 1.8232e-01, 4.2318e-01, 6.8070e-01, + 6.7107e-01, 3.5950e-01, 3.3105e-01, 1.5345e-01, + 5.0364e-01, 6.3834e-01, 2.5384e-01, 8.9469e-01, + 8.5636e-01, 8.5903e-02, 6.8240e-01, 9.1985e-01, + 1.5333e-01, 6.3524e-01, 3.8410e-01, 9.5777e-01, + 6.3208e-01, 6.9916e-01, 5.0984e-01, 9.4018e-01, + 8.5347e-01, 6.0579e-01, 4.0323e-01, 7.2110e-02, + 3.0391e-01, 8.5004e-01, 4.0011e-01, 2.1766e-01, + 2.4996e-01, 8.3817e-01, 3.3134e-01, 1.6257e-01, + 7.7411e-01, 1.8474e-01, 3.7528e-01, 2.2447e-01, + 3.3097e-01, 7.9759e-01, 6.5413e-01, 4.2646e-01, + 1.9429e-01, 3.9184e-01, 9.8345e-01, 7.0461e-01, + 9.2992e-01, 5.5353e-01, 2.4215e-01, 8.8896e-01, + 1.4076e-01, 5.3922e-01, 5.3680e-01, 3.4086e-01, + 3.6827e-02, 6.8591e-01, 5.7598e-01, 9.2286e-01, + 1.5534e-01, 8.4442e-01, 9.7693e-01, 6.3237e-01, + 2.1297e-01, 7.2103e-01, 6.7295e-01, 7.7213e-01, + 7.7624e-01, 4.4250e-01, 7.6173e-01, 6.3408e-01, + 8.4143e-01, 8.0697e-01, 1.1873e-01, 1.2005e-02, + 3.7617e-01, 2.0706e-01, 4.9533e-01, 9.6023e-01, + 3.1793e-01, 4.0810e-01, 8.9848e-02, 1.4669e-01, + 2.3224e-02, 3.4590e-01, 1.5016e-01, 6.8482e-01, + 5.4447e-02, 5.0062e-01, 2.7224e-02, 4.2357e-01, + 7.2193e-02, 7.5728e-01, 3.9624e-01, 1.7947e-01, + 9.8249e-01, 8.6582e-01, 5.8813e-01, 6.1141e-01, + 2.0695e-01, 2.9035e-03, 8.2408e-01, 8.7966e-01, + 4.3094e-01, 9.2415e-01, 4.9195e-01, 2.6851e-01, + 6.1337e-01, 9.4270e-01, 9.2968e-01, 8.5545e-01, + 3.7090e-01, 7.3166e-01, 4.3200e-01, 9.2318e-04, + 6.8527e-01, 3.8630e-01, 2.9276e-01, 9.4452e-01, + 5.5661e-01, 3.3534e-01, 6.0022e-01, 4.5533e-01, + 3.0979e-01, 4.2662e-01, 8.1662e-02, 9.0957e-01, + 3.3693e-02, 5.6665e-01, 2.1704e-01, 6.4408e-02, + 1.1745e-01, 1.8148e-01, 3.5404e-01, 5.3128e-01, + 5.1695e-01, 6.1276e-01, 9.1890e-01, 3.0434e-02, + 1.9294e-01, 4.4383e-01, 1.9963e-01, 3.9245e-01, + 2.8135e-01, 9.1337e-01, 5.9610e-01, 9.7742e-01, + 3.5662e-01, 1.7009e-01, 5.2935e-01, 9.5236e-01, + 7.5234e-01, 8.9560e-01, 9.9574e-02, 2.1619e-01, + 4.6299e-02, 4.5196e-01, 9.5694e-01, 9.2056e-02, + 5.7164e-01, 9.0188e-03, 5.9236e-01, 2.7750e-02, + 5.3824e-01, 1.4417e-01, 9.9993e-01, 2.4670e-01, + 2.9559e-01, 9.7206e-02, 4.1621e-02, 8.3694e-01, + 8.3051e-01, 5.6169e-01, 8.8527e-01, 5.1914e-01, + 4.0201e-01, 7.8341e-01, 2.9365e-01, 2.9879e-01, + 2.8162e-01, 2.0121e-01, 6.6932e-01, 6.1473e-01, + 4.9632e-03, 3.8050e-01, 3.4448e-01, 5.3838e-02, + 2.7405e-01, 8.6591e-01, 7.3213e-01, 5.1101e-01, + 9.6397e-01, 5.3489e-01, 4.2717e-01, 1.4181e-01, + 4.3734e-01, 3.5588e-01, 6.4507e-01, 7.1977e-01, + 2.5664e-01, 4.3752e-01, 5.2081e-01, 8.3232e-01, + 8.3341e-01, 8.8012e-01, 8.1018e-01, 1.3653e-01, + 6.9665e-01, 2.1682e-01, 7.9394e-01, 1.1105e-01, + 4.9175e-01, 7.1874e-01, 4.2673e-01, 3.5619e-02, + 1.7445e-01, 4.2686e-01, 2.4267e-01, 3.1348e-01, + 2.1916e-01, 8.6054e-01, 5.1870e-02, 8.2679e-01, + 7.7808e-01, 1.0574e-01, 3.0661e-01, 2.6611e-01, + 4.3519e-01, 4.6871e-01, 3.5600e-01, 7.0874e-01, + 8.7795e-02, 4.4678e-01, 7.8626e-01, 1.9312e-01, + 8.1397e-01, 8.8447e-01, 4.1526e-02, 2.2637e-01, + 4.1953e-02, 2.2587e-01, 9.1774e-01, 3.7696e-01, + 1.0103e-01, 7.8365e-01, 7.9119e-01, 3.6537e-02, + 5.2353e-02, 5.9226e-01, 5.7444e-01, 3.5985e-01, + 2.3526e-01, 4.1637e-01, 4.6316e-01, 5.2774e-01, + 2.3136e-01, 7.7246e-02, 8.8505e-01, 8.9057e-01, + 6.2438e-01, 4.2260e-01, 5.4818e-01, 4.1466e-01, + 9.8218e-01, 7.6989e-01, 9.9061e-01, 9.1208e-01, + 1.9061e-01, 3.1493e-01, 5.1772e-01, 5.6560e-01, + 6.4220e-01, 1.7290e-02, 8.9717e-01, 1.8223e-01, + 2.3977e-01, 2.0508e-01, 
1.4863e-01, 1.4836e-01, + 6.5533e-01, 2.5762e-01, 6.0459e-01, 9.4558e-01, + 8.1865e-01, 6.7206e-01, 4.9234e-01, 9.2049e-01, + 2.8530e-01, 1.8503e-01, 5.5805e-01, 4.8402e-01, + 5.6399e-02, 1.9669e-01, 1.0907e-01, 8.8177e-01, + 6.3074e-01, 5.6872e-02, 7.5582e-02, 1.6196e-01, + 7.5413e-01, 9.1751e-01, 8.2396e-01, 4.8314e-02, + 4.0233e-01, 4.7439e-01, 8.6903e-01, 6.4683e-01, + 4.0413e-01, 9.6644e-01, 4.3207e-01, 2.1787e-01, + 3.8462e-01, 5.8970e-02, 9.5603e-01, 2.9289e-01, + 1.1430e-02, 5.0879e-01, 8.8051e-01, 7.5483e-01, + 1.0664e-01, 6.4358e-01, 7.3353e-02, 5.7649e-01, + 9.0772e-01, 1.2860e-01, 3.5405e-01, 5.8783e-01, + 5.1844e-02, 6.6979e-01, 3.9982e-01, 5.7713e-01, + 9.5423e-01, 4.5051e-01, 2.8531e-01, 1.4108e-02, + 8.0540e-01, 4.4277e-01, 8.9220e-01, 2.6249e-01, + 2.4123e-01, 5.4797e-01, 6.6492e-01, 2.0508e-01, + 6.7675e-01, 4.1081e-01, 5.6068e-02, 2.2294e-01, + 4.1645e-01, 9.4258e-01, 4.8056e-01, 1.9642e-01, + 8.7808e-01, 2.3744e-01, 8.4309e-01, 5.2265e-01, + 7.4547e-01, 8.2721e-01, 9.3542e-01, 3.4692e-01, + 2.9115e-02, 7.3087e-02, 3.0307e-01, 9.5155e-01, + 2.1303e-01, 7.1446e-01, 9.1782e-01, 8.7408e-01, + 5.8906e-01, 8.3877e-01, 2.7997e-01, 4.6594e-01, + 4.2045e-01, 5.5887e-01, 8.2414e-01, 6.5204e-01, + 1.5283e-01, 4.0072e-02, 4.2285e-02, 9.5155e-01, + 7.6513e-02, 6.7783e-01, 2.9920e-01, 9.2939e-01, + 1.2315e-01, 2.4026e-01, 1.7920e-01, 6.1226e-02, + 6.4655e-01, 9.9832e-01, 4.3253e-01, 7.2332e-01, + 7.3881e-01, 7.6725e-01, 1.0282e-01, 6.2526e-02, + 6.6880e-02, 7.1237e-01, 1.8045e-01, 2.8368e-01, + 8.1627e-01, 3.2290e-01, 7.1207e-01, 9.3336e-01, + 5.8264e-01, 8.6629e-01, 1.0427e-01, 4.1347e-01, + 1.2616e-01, 9.6273e-01, 2.5433e-01, 7.5316e-01, + 9.7344e-01, 2.8688e-01, 7.9705e-01, 3.3331e-01, + 9.6254e-01, 9.1487e-01, 2.8480e-01, 3.2055e-01, + 3.9523e-01, 3.6145e-01, 4.5015e-01, 9.4686e-01, + 3.2711e-02, 8.9001e-01, 1.9633e-02, 3.7308e-01, + 5.9301e-01, 8.8253e-01, 8.2784e-01, 4.4139e-01, + 6.6233e-01, 5.8030e-01, 7.4490e-01, 9.5820e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3920, 0.2913, 0.8672, ..., 0.9245, 0.8812, 0.1957]) +tensor([0.1332, 0.3872, 0.9921, ..., 0.6563, 0.0596, 0.5136]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1509,13 +919,389 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.585279941558838 seconds +Time: 10.549095392227173 seconds -[39.18, 40.08, 38.95, 38.39, 39.05, 38.86, 38.48, 38.43, 38.49, 38.42] -[64.69] -13.222010135650635 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 363782, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.585279941558838, 'TIME_S_1KI': 0.029097866143896176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.3318356752395, 'W': 64.69} -[39.18, 40.08, 38.95, 38.39, 39.05, 38.86, 38.48, 38.43, 38.49, 38.42, 40.2, 38.44, 38.35, 38.82, 38.9, 38.53, 38.83, 38.37, 38.44, 38.39] -697.5050000000001 -34.87525000000001 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 363782, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.585279941558838, 'TIME_S_1KI': 0.029097866143896176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.3318356752395, 'W': 64.69, 'J_1KI': 2.3512208841428097, 'W_1KI': 0.17782628057462985, 'W_D': 29.81474999999999, 'J_D': 394.2109266918896, 'W_D_1KI': 
0.08195773842576046, 'J_D_1KI': 0.00022529355060382441} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), + col_indices=tensor([3175, 1540, 6513, 4566, 9706, 3242, 7522, 361, 3563, + 273, 8050, 6972, 5246, 100, 2674, 5918, 3629, 808, + 6317, 2665, 3236, 7680, 4047, 5897, 1768, 5781, 8933, + 8413, 7478, 8640, 5353, 4488, 7437, 3716, 4046, 1102, + 6131, 2784, 5612, 6734, 6293, 813, 8222, 4409, 7568, + 7734, 4823, 4746, 71, 9732, 5731, 7539, 5376, 3975, + 4034, 5323, 3781, 4198, 6205, 3448, 5920, 4554, 964, + 2149, 3775, 4363, 7665, 7615, 1360, 740, 9444, 8107, + 1702, 5055, 4887, 338, 8496, 5258, 6306, 4365, 8779, + 3316, 6271, 7936, 5465, 5927, 2341, 8746, 8614, 4168, + 7453, 8302, 1818, 3772, 900, 570, 1621, 1384, 1313, + 5863, 7529, 2013, 14, 7644, 4866, 5872, 4394, 6186, + 7063, 8838, 961, 1908, 8272, 1397, 5498, 6793, 4939, + 7488, 3334, 7992, 2581, 6595, 9145, 5581, 4949, 2140, + 6797, 414, 1120, 5151, 8169, 7479, 5174, 1884, 9527, + 2164, 3682, 8813, 2069, 8178, 7946, 3103, 3936, 5349, + 2278, 6902, 2920, 1610, 3303, 3215, 9428, 1830, 2118, + 5807, 1813, 6966, 2320, 7242, 2061, 1142, 8325, 1748, + 6423, 3895, 9860, 3334, 1571, 7560, 9529, 2255, 5990, + 7873, 1082, 5084, 4882, 8756, 2595, 3740, 8442, 4906, + 4146, 3172, 76, 1682, 3498, 5746, 4339, 3299, 2674, + 4896, 7065, 1329, 4780, 8089, 9997, 1942, 3826, 4131, + 731, 9928, 4711, 108, 4149, 1842, 8173, 1747, 9110, + 443, 1885, 4298, 1140, 4797, 448, 4550, 429, 8474, + 4481, 6412, 6907, 8449, 3203, 1144, 8022, 5563, 2882, + 5617, 6692, 1347, 1909, 5954, 8651, 3933, 1838, 8274, + 2192, 3777, 9060, 791, 4605, 3528, 4962, 9087, 4984, + 2397, 5548, 7709, 3017, 2309, 3655, 6073, 8413, 9630, + 431, 1679, 7855, 1854, 2512, 7892, 7672, 3225, 8437, + 1273, 8525, 6002, 4821, 6282, 236, 2495, 4787, 7877, + 8952, 9372, 4533, 4631, 5234, 8335, 9577, 6036, 98, + 2704, 5610, 6864, 5127, 6489, 9983, 9754, 5784, 4497, + 7982, 8077, 5919, 4969, 6009, 6638, 5854, 2056, 7764, + 3940, 5182, 6794, 6660, 8586, 4187, 8042, 2416, 8812, + 3619, 6316, 9827, 3289, 3690, 1654, 2607, 451, 2745, + 5265, 4289, 6960, 1869, 7932, 5618, 4964, 3986, 215, + 9920, 528, 1074, 6998, 9786, 9451, 4860, 7150, 5317, + 2922, 1331, 9263, 8341, 4258, 5180, 5269, 5128, 4740, + 9937, 9856, 7158, 6339, 836, 9058, 8365, 1840, 9851, + 5665, 7487, 3732, 179, 4366, 2315, 9267, 1373, 2275, + 7848, 5845, 8794, 6211, 6941, 2, 804, 8730, 5269, + 7518, 2707, 7073, 3598, 7548, 8402, 55, 4035, 7643, + 9945, 7893, 5968, 8491, 1584, 8889, 7265, 5390, 8665, + 4508, 3535, 723, 893, 7985, 4490, 7267, 4951, 8120, + 1353, 6368, 4704, 5448, 5441, 8473, 9294, 3369, 4289, + 3173, 9667, 8648, 3368, 3676, 7114, 9123, 9879, 2618, + 2041, 2076, 9915, 2059, 7375, 4830, 5178, 3566, 6066, + 9528, 432, 3275, 9207, 9228, 898, 9740, 704, 7175, + 1810, 4839, 9301, 7082, 7045, 9236, 5459, 6824, 2386, + 9099, 2425, 6745, 8533, 9747, 7091, 7333, 6526, 5056, + 8568, 3601, 9200, 611, 9078, 7305, 7509, 8202, 2800, + 676, 4144, 2431, 6051, 523, 4438, 7508, 8507, 4826, + 9235, 9404, 1554, 6190, 1800, 8838, 8616, 9273, 1919, + 210, 323, 3095, 2911, 1383, 7516, 3916, 6756, 186, + 1597, 4025, 9811, 1346, 7942, 8460, 4239, 3491, 9614, + 1574, 3026, 
1308, 2921, 9065, 7897, 1949, 5662, 3969, + 6445, 2108, 3419, 7470, 7688, 9282, 1284, 1190, 7548, + 5183, 1643, 8925, 2434, 1016, 3356, 4259, 736, 9699, + 4586, 9185, 7689, 8664, 6332, 6646, 6512, 3621, 87, + 6668, 2483, 2434, 1838, 4859, 4139, 6036, 8716, 5847, + 9068, 1235, 5372, 1971, 6227, 1556, 4067, 4084, 9394, + 8499, 8204, 8364, 6391, 8186, 3868, 7454, 4389, 3681, + 5817, 3764, 3744, 6962, 5091, 9055, 6773, 4867, 6944, + 2264, 652, 3256, 2751, 8863, 7674, 2641, 9240, 2245, + 9330, 8024, 9843, 4610, 82, 200, 8852, 5519, 2948, + 1600, 4032, 1320, 5925, 2330, 9335, 8707, 6761, 5646, + 813, 1857, 537, 7443, 5765, 3517, 7296, 7246, 6140, + 7043, 8305, 6878, 7142, 1532, 5166, 1412, 718, 9870, + 5087, 452, 3492, 6284, 6960, 7980, 454, 5109, 9800, + 5047, 2964, 1824, 5078, 6124, 9571, 9534, 2567, 8828, + 4193, 1120, 3726, 1508, 8125, 1525, 2609, 983, 9289, + 6898, 5932, 8744, 6717, 5439, 4405, 853, 7405, 3237, + 2345, 6750, 8221, 667, 5951, 632, 9570, 2101, 6091, + 3022, 3318, 4966, 5463, 4525, 9617, 5713, 8112, 7764, + 1714, 3834, 6463, 7656, 4347, 9414, 6948, 9428, 7757, + 709, 2683, 5160, 5955, 5530, 6376, 3284, 4446, 5698, + 2275, 2092, 6675, 5841, 5301, 6848, 9042, 3714, 5714, + 3313, 8575, 5834, 1116, 3984, 2225, 8925, 9909, 7358, + 9350, 6525, 5220, 7818, 1339, 6950, 9289, 9181, 9998, + 7845, 6947, 9177, 8578, 4463, 1874, 70, 2789, 9195, + 4686, 2356, 4115, 1229, 3411, 9168, 3818, 4667, 6830, + 6742, 8796, 4708, 6334, 9212, 955, 1317, 5763, 7646, + 8855, 9864, 5649, 4410, 9725, 7835, 2594, 4904, 9545, + 1567, 3913, 9838, 887, 929, 6237, 8603, 266, 8794, + 4699, 6401, 8344, 7226, 8586, 183, 9072, 8396, 1293, + 7009, 8570, 6640, 2705, 4187, 2146, 1943, 5404, 4534, + 3765, 8271, 1936, 8038, 6160, 6410, 5531, 6130, 449, + 6410, 825, 5772, 7843, 6366, 8489, 3632, 1857, 6681, + 6911, 6206, 9698, 5731, 2691, 7020, 9437, 9086, 7250, + 9133, 8448, 9615, 9805, 7718, 7430, 8451, 617, 7119, + 3608, 2957, 8156, 2628, 381, 2102, 5147, 1812, 7632, + 6080, 3801, 6557, 3664, 7115, 4747, 9946, 933, 9043, + 1209, 8903, 2470, 7079, 2590, 4242, 1267, 2310, 1880, + 1533, 8678, 2879, 5347, 3885, 2968, 7456, 8838, 419, + 3883, 1590, 8133, 338, 7811, 610, 5462, 8096, 5384, + 2656, 3961, 9755, 4315, 8580, 1978, 7459, 2728, 6364, + 3022, 540, 8591, 2436, 6393, 2785, 2076, 2351, 7720, + 3071, 210, 3880, 3780, 5455, 4217, 6253, 1690, 7922, + 8344, 3819, 673, 5739, 9352, 339, 2786, 4845, 1565, + 3191, 9970, 5600, 5489, 5182, 6938, 5244, 6170, 2279, + 5161, 5371, 3300, 45, 6679, 8740, 7633, 4511, 1112, + 1547, 359, 8586, 849, 9883, 8268, 2875, 7310, 8225, + 1860, 3766, 6124, 7431, 5973, 1224, 8189, 1700, 9843, + 2201, 8620, 3055, 1358, 7636, 5811, 5697, 3368, 2604, + 2311, 8744, 5512, 4770, 1238, 7759, 9050, 3348, 7125, + 1114, 7201, 7256, 5261, 6468, 3791, 4716, 1684, 3831, + 2665, 4866, 2193, 1393, 7781, 4603, 2849, 4725, 1778, + 9535, 4976, 3128, 4153, 976, 3584, 9384, 4201, 3066, + 3919, 5286, 1666, 9720, 5671, 8474, 8596, 6477, 9376, + 8643, 5995, 856, 6368, 3119, 9235, 7286, 4524, 7608, + 8305, 1316, 3514, 8323, 3763, 5982, 5171, 9325, 2051, + 3757]), + values=tensor([5.4503e-02, 2.9508e-01, 2.7621e-01, 5.9220e-01, + 6.7599e-01, 6.9875e-01, 7.2193e-02, 4.2622e-01, + 4.4064e-01, 4.9415e-01, 4.8448e-01, 4.6026e-01, + 8.3385e-01, 6.8127e-01, 7.4836e-01, 9.8446e-01, + 4.5546e-01, 1.1369e-01, 7.1924e-01, 1.0264e-01, + 9.0307e-01, 6.5371e-02, 4.6885e-02, 6.4486e-01, + 7.8103e-01, 2.8906e-01, 4.3815e-01, 3.8670e-01, + 4.1476e-01, 1.4529e-01, 4.5266e-02, 8.7285e-01, + 8.3886e-01, 5.0968e-01, 1.9297e-01, 8.7115e-01, 
+ 4.2403e-01, 1.7959e-01, 7.8896e-01, 8.3163e-01, + 2.2324e-01, 2.8136e-01, 1.9157e-02, 7.8786e-01, + 2.7574e-01, 1.6824e-01, 8.9669e-01, 2.4513e-01, + 5.8585e-01, 3.7827e-01, 2.4067e-01, 5.7494e-01, + 7.5649e-01, 5.4031e-01, 1.9831e-01, 2.0146e-01, + 3.5111e-03, 1.4887e-01, 9.3482e-01, 8.9867e-01, + 7.3926e-01, 8.9432e-01, 7.7800e-01, 7.8593e-02, + 9.0410e-01, 1.3148e-01, 2.7506e-02, 2.8367e-01, + 3.7537e-01, 4.9944e-01, 9.2122e-01, 2.8280e-01, + 1.5841e-01, 6.6171e-01, 2.7753e-01, 7.5495e-01, + 7.8495e-01, 4.0801e-01, 5.1023e-02, 6.5582e-01, + 8.4918e-01, 4.8003e-02, 3.5027e-01, 9.2382e-01, + 4.3491e-01, 2.8400e-01, 4.9820e-01, 4.0135e-01, + 9.0951e-01, 8.1196e-01, 1.4390e-01, 3.9182e-01, + 9.1794e-01, 4.1848e-01, 8.6036e-01, 6.1219e-01, + 8.7067e-01, 8.2945e-01, 3.9732e-01, 7.8015e-01, + 8.9715e-02, 8.2485e-02, 3.5463e-01, 9.2038e-01, + 3.3054e-01, 6.8980e-01, 8.3428e-01, 6.7282e-01, + 9.0467e-01, 9.8824e-01, 2.3744e-01, 7.6530e-01, + 5.0061e-01, 1.5883e-01, 5.3783e-01, 7.9345e-01, + 3.7436e-01, 2.0235e-01, 7.7143e-01, 5.2818e-01, + 5.0000e-01, 1.1490e-02, 9.7408e-01, 2.0383e-01, + 6.8759e-02, 4.2208e-01, 7.0805e-01, 3.5602e-01, + 1.4045e-01, 1.8772e-01, 8.2939e-01, 2.6640e-01, + 7.5430e-01, 6.3779e-01, 9.8984e-01, 3.1603e-03, + 6.1629e-01, 4.2752e-01, 4.2494e-01, 3.2776e-01, + 7.0716e-01, 6.0492e-01, 2.4134e-01, 9.9805e-01, + 7.8767e-01, 9.6566e-01, 1.7115e-01, 7.1157e-02, + 4.0609e-01, 9.1913e-01, 4.0558e-01, 7.9724e-01, + 9.6047e-01, 2.4534e-01, 6.7326e-01, 7.3496e-01, + 2.5675e-01, 1.7448e-01, 6.2499e-02, 5.6949e-01, + 7.5770e-01, 3.9893e-01, 5.3252e-01, 4.1528e-01, + 3.5445e-01, 9.4052e-01, 9.9915e-01, 3.2511e-01, + 2.7636e-01, 9.6422e-01, 1.8532e-02, 4.8863e-01, + 1.6115e-01, 4.8734e-01, 4.4591e-01, 1.2811e-01, + 5.5702e-01, 6.3130e-01, 1.8993e-01, 5.4511e-01, + 2.1032e-01, 6.5660e-01, 2.6576e-01, 8.1915e-01, + 5.9613e-01, 3.3776e-01, 6.2370e-01, 8.7516e-01, + 6.3332e-01, 9.1021e-01, 5.9864e-01, 2.2683e-01, + 3.7532e-01, 3.4357e-01, 9.2278e-01, 1.0352e-01, + 7.7095e-01, 3.3999e-01, 3.1460e-01, 9.0103e-01, + 3.5506e-01, 8.8805e-01, 5.7371e-01, 2.3930e-01, + 8.7840e-01, 6.0017e-01, 4.8130e-01, 4.4910e-01, + 5.3282e-01, 8.6422e-02, 2.0105e-01, 4.1225e-01, + 3.6155e-01, 6.2255e-01, 1.8640e-01, 8.2609e-01, + 1.7861e-01, 4.5248e-01, 8.8949e-01, 9.6998e-01, + 2.6238e-01, 6.4922e-01, 6.7744e-01, 8.7497e-01, + 5.2727e-02, 2.5528e-03, 5.5124e-01, 8.9615e-01, + 7.7634e-01, 5.0560e-01, 6.4556e-01, 3.4657e-01, + 3.1546e-01, 2.4480e-02, 6.3994e-01, 1.1798e-01, + 7.4735e-01, 7.7755e-01, 4.2687e-01, 2.0743e-01, + 4.1103e-01, 4.7313e-01, 6.6535e-01, 1.5899e-01, + 5.0196e-01, 1.5159e-01, 1.8033e-01, 4.0226e-01, + 4.0348e-01, 5.4168e-01, 4.0167e-01, 1.6018e-01, + 2.5039e-01, 3.4686e-01, 2.5177e-01, 4.9281e-01, + 5.0762e-01, 8.7205e-01, 5.2584e-02, 7.4358e-01, + 3.1876e-01, 2.8162e-01, 6.8347e-01, 8.9200e-01, + 2.3220e-01, 1.1302e-01, 4.9947e-01, 3.5588e-01, + 6.1641e-01, 2.4231e-01, 6.5140e-02, 4.4885e-02, + 3.2197e-01, 6.1080e-01, 7.8795e-01, 2.6929e-02, + 5.8856e-01, 2.4904e-01, 8.5655e-01, 8.7065e-01, + 2.1587e-01, 7.8035e-01, 9.2913e-01, 2.1233e-01, + 6.0794e-02, 7.7621e-01, 5.9383e-01, 7.7282e-01, + 2.1097e-01, 4.5931e-01, 1.2216e-01, 5.5394e-01, + 8.2536e-01, 9.4006e-01, 6.9903e-01, 2.3400e-01, + 1.6489e-01, 9.9877e-01, 3.9160e-03, 9.6684e-01, + 6.2591e-01, 2.5349e-01, 8.7857e-01, 4.9299e-01, + 2.8608e-01, 9.5114e-01, 1.4473e-01, 6.2640e-01, + 4.5802e-01, 5.0106e-02, 6.0613e-01, 8.2241e-02, + 7.2741e-01, 7.5131e-01, 8.4107e-01, 2.1466e-01, + 8.8673e-01, 8.0627e-01, 7.4367e-01, 8.8514e-01, + 
+  ... (remaining entries of the 1000-value uniform-random tensor elided) ...,
+        6.6233e-01, 5.8030e-01, 7.4490e-01, 9.5820e-01]),
+       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
+tensor([0.1332, 0.3872, 0.9921, ..., 0.6563, 0.0596, 0.5136])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([10000, 10000])
+Rows: 10000
+Size: 100000000
+NNZ: 1000
+Density: 1e-05
+Time: 10.549095392227173 seconds
+
+[45.65, 38.89, 39.88, 38.76, 38.37, 38.3, 38.7, 38.8, 39.08, 38.56]
+[65.27]
+13.208173513412476
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 361507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.549095392227173, 'TIME_S_1KI': 0.029180888315377497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 862.0974852204322, 'W': 65.27}
+[45.65, 38.89, 39.88, 38.76, 38.37, 38.3, 38.7, 38.8, 39.08, 38.56, 39.02, 38.54, 38.45, 38.34, 38.8, 39.14, 38.83, 39.15, 38.35, 39.72]
+701.855
+35.09275
+{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 361507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.549095392227173, 'TIME_S_1KI': 0.029180888315377497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 862.0974852204322, 'W': 65.27, 'J_1KI': 2.3847324815852313, 'W_1KI': 0.18054975422329303, 'W_D': 30.177249999999994, 'J_D': 398.58635415762654, 'W_D_1KI': 0.08347625357185336, 'J_D_1KI': 0.0002309118594435332}
diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json
index f3fd74b..98e0f55 100644
--- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json
+++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json
@@ -1 +1 @@
-{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1366, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 
10.481968879699707, "TIME_S_1KI": 7.673476485870942, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 980.9324438238144, "W": 72.78, "J_1KI": 718.1057421843444, "W_1KI": 53.2796486090776, "W_D": 37.9805, "J_D": 511.9030596681833, "W_D_1KI": 27.804172767203514, "J_D_1KI": 20.35444565681077} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1357, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.362020254135132, "TIME_S_1KI": 7.635976605847555, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 997.3382936573029, "W": 74.92, "J_1KI": 734.9582119803264, "W_1KI": 55.21002210759028, "W_D": 39.366, "J_D": 524.0419016032218, "W_D_1KI": 29.00957995578482, "J_D_1KI": 21.377730254815635} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output index 170f45c..f772ea4 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '500000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.686005115509033} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.737008094787598} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
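
The paired commands above show how each benchmark picks its iteration count: a probe run at 1000 iterations is timed, and the count is rescaled toward a roughly 10.5-second target until a run fills the 10-second baseline window (here 1000 iterations took 7.737 s, and 1000 * 10.5 / 7.737 = 1357). A minimal sketch of that calibration loop, with hypothetical names since the driver code itself is not part of this diff:

    # Sketch only: reconstructs the rescaling visible in the logs; the real
    # spmv.py/batch.py logic may differ in rounding and stopping details.
    TARGET_S = 10.5   # implied by 1000 * 10.5 / 7.737 ~= 1357
    MIN_S = 10.0      # matches BASELINE_TIME_S in the JSON records

    def calibrate_iterations(run_benchmark, start=1000):
        """run_benchmark(n) returns the measured TIME_S for n iterations."""
        n = start
        elapsed = run_benchmark(n)
        while elapsed < MIN_S:                # rerun until the window is filled
            n = int(n * TARGET_S / elapsed)   # e.g. 33619 -> 35695 after a 9.889 s run
            elapsed = run_benchmark(n)
        return n, elapsed
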
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 7, ..., 2499986, - 2499989, 2500000]), - col_indices=tensor([176994, 249617, 373837, ..., 283997, 343168, - 447931]), - values=tensor([0.4576, 0.5348, 0.2572, ..., 0.1314, 0.2229, 0.5974]), +tensor(crow_indices=tensor([ 0, 6, 9, ..., 2499995, + 2499998, 2500000]), + col_indices=tensor([ 13538, 14404, 124427, ..., 299545, 64656, + 263709]), + values=tensor([0.6726, 0.7704, 0.5503, ..., 0.8434, 0.2560, 0.2989]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1382, 0.9782, 0.8741, ..., 0.2337, 0.6569, 0.8329]) +tensor([0.7902, 0.8995, 0.9133, ..., 0.8775, 0.6765, 0.9460]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 7.686005115509033 seconds +Time: 7.737008094787598 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1366', '-ss', '500000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.481968879699707} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1357', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.362020254135132} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 9, ..., 2499988, +tensor(crow_indices=tensor([ 0, 7, 12, ..., 2499987, 2499995, 2500000]), - col_indices=tensor([ 13301, 29016, 299078, ..., 480591, 481476, - 496604]), - values=tensor([0.4578, 0.5414, 0.1917, ..., 0.8449, 0.5002, 0.9459]), + col_indices=tensor([ 74385, 156503, 312661, ..., 102229, 341067, + 464580]), + values=tensor([0.2383, 0.0369, 0.7603, ..., 0.0658, 0.9688, 0.3918]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0252, 0.3938, 0.2908, ..., 0.4459, 0.5549, 0.8752]) +tensor([0.4224, 0.2766, 0.2547, ..., 0.2726, 0.8333, 0.3690]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.481968879699707 seconds +Time: 10.362020254135132 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 9, ..., 2499988, +tensor(crow_indices=tensor([ 0, 7, 12, ..., 2499987, 2499995, 2500000]), - col_indices=tensor([ 13301, 29016, 299078, ..., 480591, 481476, - 496604]), - values=tensor([0.4578, 0.5414, 0.1917, ..., 0.8449, 0.5002, 0.9459]), + col_indices=tensor([ 74385, 156503, 312661, ..., 102229, 341067, + 464580]), + values=tensor([0.2383, 0.0369, 0.7603, ..., 0.0658, 0.9688, 0.3918]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0252, 0.3938, 0.2908, ..., 0.4459, 0.5549, 0.8752]) +tensor([0.4224, 0.2766, 0.2547, ..., 0.2726, 0.8333, 0.3690]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.481968879699707 seconds +Time: 10.362020254135132 seconds -[39.21, 38.47, 38.56, 38.59, 38.63, 38.48, 38.95, 38.74, 38.52, 38.83] -[72.78] -13.478049516677856 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1366, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.481968879699707, 'TIME_S_1KI': 7.673476485870942, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 980.9324438238144, 'W': 72.78} -[39.21, 38.47, 38.56, 38.59, 38.63, 38.48, 38.95, 38.74, 38.52, 38.83, 39.07, 38.47, 38.63, 38.57, 38.58, 38.77, 38.83, 38.82, 38.46, 38.73] -695.99 -34.7995 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1366, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.481968879699707, 'TIME_S_1KI': 7.673476485870942, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 980.9324438238144, 'W': 72.78, 'J_1KI': 718.1057421843444, 'W_1KI': 53.2796486090776, 'W_D': 37.9805, 'J_D': 511.9030596681833, 'W_D_1KI': 27.804172767203514, 'J_D_1KI': 20.35444565681077} +[40.3, 38.79, 39.14, 38.79, 39.0, 38.98, 38.53, 44.02, 38.67, 38.42] +[74.92] +13.31204342842102 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.362020254135132, 'TIME_S_1KI': 7.635976605847555, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.3382936573029, 'W': 74.92} +[40.3, 38.79, 39.14, 38.79, 39.0, 38.98, 38.53, 44.02, 38.67, 38.42, 39.44, 38.36, 38.82, 38.43, 45.76, 38.31, 39.93, 38.5, 38.79, 38.36] +711.08 +35.554 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.362020254135132, 'TIME_S_1KI': 7.635976605847555, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.3382936573029, 'W': 74.92, 'J_1KI': 734.9582119803264, 'W_1KI': 55.21002210759028, 'W_D': 39.366, 'J_D': 524.0419016032218, 'W_D_1KI': 29.00957995578482, 'J_D_1KI': 21.377730254815635} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json index 3949dab..0569add 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 15344, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.396661281585693, "TIME_S_1KI": 0.6775717727832178, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 846.4673494148254, "W": 65.08, "J_1KI": 55.166015994188314, "W_1KI": 4.241397288842545, "W_D": 30.18325, "J_D": 392.5804490507841, "W_D_1KI": 1.9671044056308655, "J_D_1KI": 0.1282002349863703} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 15401, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.70950984954834, "TIME_S_1KI": 0.6953775631159236, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 846.8573481559754, "W": 65.04, "J_1KI": 54.987166298031, "W_1KI": 4.223102395948315, "W_D": 30.268000000000008, "J_D": 394.10636860370647, "W_D_1KI": 1.9653269268229343, "J_D_1KI": 0.12761034522582523} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output index 4ae3041..5835407 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6842620372772217} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6817739009857178} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 10, ..., 249991, 249995, +tensor(crow_indices=tensor([ 0, 3, 11, ..., 249990, 249996, 250000]), - col_indices=tensor([ 5258, 47122, 48422, ..., 30033, 41208, 46342]), - values=tensor([0.6499, 0.7211, 0.6182, ..., 0.7244, 0.8782, 0.8107]), + col_indices=tensor([22352, 25754, 44016, ..., 24187, 38739, 43878]), + values=tensor([0.9987, 0.7536, 0.3762, ..., 0.2868, 0.8081, 0.6848]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4190, 0.1278, 0.1748, ..., 0.3464, 0.8679, 0.1666]) +tensor([0.2548, 0.4461, 0.9076, ..., 0.8528, 0.8836, 0.6180]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.6842620372772217 seconds +Time: 0.6817739009857178 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15344', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.396661281585693} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15401', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.70950984954834} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 9, ..., 249992, 249997, +tensor(crow_indices=tensor([ 0, 5, 9, ..., 249990, 249994, 250000]), - col_indices=tensor([10534, 13796, 13942, ..., 20381, 35132, 47921]), - values=tensor([0.7820, 0.3755, 0.2967, ..., 0.2418, 0.5762, 0.2824]), + col_indices=tensor([21278, 27457, 27912, ..., 25636, 33177, 40764]), + values=tensor([0.5508, 0.6259, 0.1639, ..., 0.1456, 0.5920, 0.1745]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5105, 0.5604, 0.4598, ..., 0.4891, 0.0194, 0.7500]) +tensor([0.3112, 0.1298, 0.2276, ..., 0.1739, 0.6060, 0.6815]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.396661281585693 seconds +Time: 10.70950984954834 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
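
Each record closes with the raw power data behind the JSON fields: ten idle watt samples, the average load wattage, the elapsed measurement time, and the result dict, followed by a twenty-sample idle list whose mean feeds the W_D fields. The arithmetic those fields imply, written out as an illustrative helper (only the field names come from the records; the function itself is an assumption):

    # Sketch of the derived-metric arithmetic implied by the logs, e.g. for the
    # 50000 x 50000, density 1e-4 record below: J = 65.04 W * 13.021 s = 846.86 J
    # and W_D = 65.04 - 695.44 / 20 = 30.268 W.
    def derive_metrics(record, idle_watts, load_watts, elapsed_s):
        k = record["ITERATIONS"] / 1000            # *_1KI fields are per 1000 iterations
        record["W"] = load_watts
        record["J"] = load_watts * elapsed_s       # energy over the measurement window
        record["W_D"] = load_watts - sum(idle_watts) / len(idle_watts)
        record["J_D"] = record["W_D"] * elapsed_s  # energy above idle
        record["TIME_S_1KI"] = record["TIME_S"] / k
        for key in ("J", "W", "W_D", "J_D"):
            record[key + "_1KI"] = record[key] / k
        return record
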
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 9, ..., 249992, 249997, +tensor(crow_indices=tensor([ 0, 5, 9, ..., 249990, 249994, 250000]), - col_indices=tensor([10534, 13796, 13942, ..., 20381, 35132, 47921]), - values=tensor([0.7820, 0.3755, 0.2967, ..., 0.2418, 0.5762, 0.2824]), + col_indices=tensor([21278, 27457, 27912, ..., 25636, 33177, 40764]), + values=tensor([0.5508, 0.6259, 0.1639, ..., 0.1456, 0.5920, 0.1745]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5105, 0.5604, 0.4598, ..., 0.4891, 0.0194, 0.7500]) +tensor([0.3112, 0.1298, 0.2276, ..., 0.1739, 0.6060, 0.6815]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.396661281585693 seconds +Time: 10.70950984954834 seconds -[39.48, 38.54, 38.4, 39.05, 39.65, 38.93, 38.49, 38.76, 38.48, 39.28] -[65.08] -13.006566524505615 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15344, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.396661281585693, 'TIME_S_1KI': 0.6775717727832178, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 846.4673494148254, 'W': 65.08} -[39.48, 38.54, 38.4, 39.05, 39.65, 38.93, 38.49, 38.76, 38.48, 39.28, 39.19, 38.51, 38.7, 38.61, 39.0, 38.52, 39.05, 38.49, 38.55, 38.46] -697.935 -34.89675 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15344, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.396661281585693, 'TIME_S_1KI': 0.6775717727832178, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 846.4673494148254, 'W': 65.08, 'J_1KI': 55.166015994188314, 'W_1KI': 4.241397288842545, 'W_D': 30.18325, 'J_D': 392.5804490507841, 'W_D_1KI': 1.9671044056308655, 'J_D_1KI': 0.1282002349863703} +[39.37, 38.33, 39.38, 39.01, 38.5, 38.38, 38.36, 38.51, 38.52, 38.33] +[65.04] +13.020561933517456 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.70950984954834, 'TIME_S_1KI': 0.6953775631159236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 846.8573481559754, 'W': 65.04} +[39.37, 38.33, 39.38, 39.01, 38.5, 38.38, 38.36, 38.51, 38.52, 38.33, 40.16, 38.83, 38.77, 38.25, 38.69, 38.32, 38.3, 38.47, 38.28, 39.22] +695.4399999999999 +34.772 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.70950984954834, 'TIME_S_1KI': 0.6953775631159236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 846.8573481559754, 'W': 65.04, 'J_1KI': 54.987166298031, 'W_1KI': 4.223102395948315, 'W_D': 30.268000000000008, 'J_D': 394.10636860370647, 'W_D_1KI': 1.9653269268229343, 'J_D_1KI': 0.12761034522582523} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json index bde3932..4fe2ca5 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3489, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.437294721603394, "TIME_S_1KI": 2.991486019376152, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 933.3928768968582, "W": 69.22, "J_1KI": 267.52447030577764, "W_1KI": 19.83949555746632, "W_D": 34.06175, "J_D": 459.30359469288595, "W_D_1KI": 9.762611063341934, "J_D_1KI": 2.798111511419299} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3498, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.43606948852539, "TIME_S_1KI": 2.983438961842593, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 938.4889250850676, "W": 69.66, "J_1KI": 268.29300316897303, "W_1KI": 19.914236706689536, "W_D": 34.37075, "J_D": 463.0572526825667, "W_D_1KI": 9.82582904516867, "J_D_1KI": 2.8089848613975614} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output index 4299e13..08291b1 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.0088562965393066} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.0015740394592285} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 34, 93, ..., 2499916, - 2499957, 2500000]), - col_indices=tensor([ 603, 3952, 4942, ..., 45684, 45744, 47378]), - values=tensor([0.2755, 0.3359, 0.2897, ..., 0.6537, 0.9903, 0.6398]), +tensor(crow_indices=tensor([ 0, 44, 103, ..., 2499905, + 2499956, 2500000]), + col_indices=tensor([ 226, 2395, 3856, ..., 46208, 48736, 49649]), + values=tensor([0.2794, 0.3289, 0.9047, ..., 0.2004, 0.4257, 0.7682]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0226, 0.9900, 0.4586, ..., 0.9619, 0.5778, 0.7456]) +tensor([0.4960, 0.6719, 0.9417, ..., 0.9330, 0.7654, 0.9120]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 3.0088562965393066 seconds +Time: 3.0015740394592285 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3489', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.437294721603394} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3498', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.43606948852539} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 107, ..., 2499916, - 2499955, 2500000]), - col_indices=tensor([ 84, 88, 1962, ..., 43229, 45310, 46070]), - values=tensor([0.8625, 0.0720, 0.1202, ..., 0.4148, 0.7410, 0.3059]), +tensor(crow_indices=tensor([ 0, 53, 101, ..., 2499890, + 2499947, 2500000]), + col_indices=tensor([ 1, 302, 356, ..., 47860, 48391, 48616]), + values=tensor([0.1949, 0.9610, 0.6433, ..., 0.8236, 0.0074, 0.9971]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1590, 0.6012, 0.6850, ..., 0.6120, 0.4384, 0.7195]) +tensor([0.6285, 0.6234, 0.6444, ..., 0.5791, 0.7727, 0.1804]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.437294721603394 seconds +Time: 10.43606948852539 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 107, ..., 2499916, - 2499955, 2500000]), - col_indices=tensor([ 84, 88, 1962, ..., 43229, 45310, 46070]), - values=tensor([0.8625, 0.0720, 0.1202, ..., 0.4148, 0.7410, 0.3059]), +tensor(crow_indices=tensor([ 0, 53, 101, ..., 2499890, + 2499947, 2500000]), + col_indices=tensor([ 1, 302, 356, ..., 47860, 48391, 48616]), + values=tensor([0.1949, 0.9610, 0.6433, ..., 0.8236, 0.0074, 0.9971]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1590, 0.6012, 0.6850, ..., 0.6120, 0.4384, 0.7195]) +tensor([0.6285, 0.6234, 0.6444, ..., 0.5791, 0.7727, 0.1804]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.437294721603394 seconds +Time: 10.43606948852539 seconds -[39.1, 38.45, 44.52, 38.52, 39.08, 38.38, 40.78, 38.54, 38.4, 38.35] -[69.22] -13.484439134597778 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3489, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.437294721603394, 'TIME_S_1KI': 2.991486019376152, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 933.3928768968582, 'W': 69.22} -[39.1, 38.45, 44.52, 38.52, 39.08, 38.38, 40.78, 38.54, 38.4, 38.35, 39.37, 38.65, 38.68, 38.35, 38.4, 38.76, 38.81, 38.76, 38.48, 38.39] -703.165 -35.158249999999995 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3489, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.437294721603394, 'TIME_S_1KI': 2.991486019376152, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 933.3928768968582, 'W': 69.22, 'J_1KI': 267.52447030577764, 'W_1KI': 19.83949555746632, 'W_D': 34.06175, 'J_D': 459.30359469288595, 'W_D_1KI': 9.762611063341934, 'J_D_1KI': 2.798111511419299} +[38.98, 38.56, 38.59, 38.25, 38.75, 38.31, 38.43, 38.27, 38.33, 43.32] +[69.66] +13.472422122955322 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3498, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.43606948852539, 'TIME_S_1KI': 2.983438961842593, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 938.4889250850676, 'W': 69.66} +[38.98, 38.56, 38.59, 38.25, 38.75, 38.31, 38.43, 38.27, 38.33, 43.32, 39.04, 38.73, 38.9, 38.87, 38.44, 45.59, 38.97, 38.44, 40.32, 38.73] +705.785 +35.289249999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3498, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.43606948852539, 'TIME_S_1KI': 2.983438961842593, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 938.4889250850676, 'W': 69.66, 'J_1KI': 268.29300316897303, 'W_1KI': 19.914236706689536, 'W_D': 34.37075, 'J_D': 463.0572526825667, 'W_D_1KI': 9.82582904516867, 'J_D_1KI': 2.8089848613975614} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json index eb380f5..5c43412 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 35734, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.471917629241943, "TIME_S_1KI": 0.2930519289539918, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 841.987050704956, "W": 64.44, "J_1KI": 23.562630847510942, "W_1KI": 1.803324564840208, "W_D": 29.634499999999996, "J_D": 387.2108202066421, "W_D_1KI": 0.8293082218615323, "J_D_1KI": 0.023207819495761246} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 35695, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.507647275924683, "TIME_S_1KI": 0.2943730851918947, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 842.3661928725243, "W": 64.41, "J_1KI": 23.598996858734395, "W_1KI": 1.8044544053789044, "W_D": 29.134750000000004, "J_D": 381.0297847817541, "W_D_1KI": 0.8162137554279313, "J_D_1KI": 0.02286633297178684} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output index 96eed5a..c6e8d71 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.3157460689544678} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.3123207092285156} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([ 4062, 9525, 48228, ..., 39550, 26780, 46383]), - values=tensor([0.9682, 0.2653, 0.7546, ..., 0.8059, 0.5876, 0.9597]), +tensor(crow_indices=tensor([ 0, 3, 3, ..., 25000, 25000, 25000]), + col_indices=tensor([ 1731, 4163, 39043, ..., 48142, 1105, 32715]), + values=tensor([0.9730, 0.5233, 0.5883, ..., 0.0098, 0.9466, 0.3610]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6423, 0.4854, 0.6493, ..., 0.6821, 0.6803, 0.2283]) +tensor([0.3233, 0.5001, 0.4757, ..., 0.9452, 0.0190, 0.8013]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.3157460689544678 seconds +Time: 0.3123207092285156 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '33254', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.771223545074463} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '33619', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.88913083076477} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([27980, 12083, 1659, ..., 17852, 35908, 47898]), - values=tensor([0.9789, 0.4410, 0.2389, ..., 0.6711, 0.3630, 0.6906]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([10235, 29693, 19116, ..., 40289, 44691, 23523]), + values=tensor([0.1639, 0.2137, 0.2836, ..., 0.1546, 0.8297, 0.2686]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9986, 0.5860, 0.4640, ..., 0.2646, 0.6800, 0.7666]) +tensor([0.0511, 0.8204, 0.3831, ..., 0.1304, 0.0964, 0.0598]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 9.771223545074463 seconds +Time: 9.88913083076477 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35734', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.471917629241943} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35695', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.507647275924683} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 25000, 25000]), - col_indices=tensor([14210, 9782, 13262, ..., 32699, 48019, 38373]), - values=tensor([0.8162, 0.2704, 0.1597, ..., 0.7469, 0.6704, 0.2691]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 24999, 25000]), + col_indices=tensor([19065, 20351, 39842, ..., 40423, 9509, 47347]), + values=tensor([0.9158, 0.3839, 0.2352, ..., 0.6644, 0.6974, 0.4594]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5208, 0.6581, 0.5659, ..., 0.1337, 0.4152, 0.4244]) +tensor([0.0381, 0.0022, 0.0479, ..., 0.9299, 0.2975, 0.9449]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.471917629241943 seconds +Time: 10.507647275924683 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 25000, 25000]), - col_indices=tensor([14210, 9782, 13262, ..., 32699, 48019, 38373]), - values=tensor([0.8162, 0.2704, 0.1597, ..., 0.7469, 0.6704, 0.2691]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 24999, 25000]), + col_indices=tensor([19065, 20351, 39842, ..., 40423, 9509, 47347]), + values=tensor([0.9158, 0.3839, 0.2352, ..., 0.6644, 0.6974, 0.4594]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5208, 0.6581, 0.5659, ..., 0.1337, 0.4152, 0.4244]) +tensor([0.0381, 0.0022, 0.0479, ..., 0.9299, 0.2975, 0.9449]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.471917629241943 seconds +Time: 10.507647275924683 seconds -[39.24, 38.48, 38.44, 38.43, 38.45, 38.48, 38.5, 39.07, 38.96, 38.4] -[64.44] -13.066217422485352 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35734, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.471917629241943, 'TIME_S_1KI': 0.2930519289539918, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 841.987050704956, 'W': 64.44} -[39.24, 38.48, 38.44, 38.43, 38.45, 38.48, 38.5, 39.07, 38.96, 38.4, 39.76, 38.41, 38.52, 38.56, 38.6, 38.96, 38.81, 38.77, 38.7, 38.54] -696.11 -34.8055 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35734, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.471917629241943, 'TIME_S_1KI': 0.2930519289539918, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 841.987050704956, 'W': 64.44, 'J_1KI': 23.562630847510942, 'W_1KI': 1.803324564840208, 'W_D': 29.634499999999996, 'J_D': 387.2108202066421, 'W_D_1KI': 0.8293082218615323, 'J_D_1KI': 0.023207819495761246} +[39.22, 44.39, 40.14, 38.47, 39.94, 38.41, 38.49, 38.91, 39.41, 38.82] +[64.41] +13.078189611434937 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35695, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.507647275924683, 'TIME_S_1KI': 0.2943730851918947, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 842.3661928725243, 'W': 64.41} +[39.22, 44.39, 40.14, 38.47, 39.94, 38.41, 38.49, 38.91, 39.41, 38.82, 39.08, 38.6, 38.48, 38.48, 38.48, 38.38, 38.83, 39.42, 38.84, 38.55] +705.5049999999999 +35.27524999999999 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35695, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.507647275924683, 'TIME_S_1KI': 0.2943730851918947, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 842.3661928725243, 'W': 64.41, 'J_1KI': 23.598996858734395, 'W_1KI': 1.8044544053789044, 'W_D': 29.134750000000004, 'J_D': 381.0297847817541, 'W_D_1KI': 0.8162137554279313, 'J_D_1KI': 0.02286633297178684} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..cbaf9ec --- /dev/null +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 478217, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.855157613754272, "TIME_S_1KI": 0.022699229876299402, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 856.2491577148438, "W": 64.87, "J_1KI": 1.7905033859416202, "W_1KI": 0.1356497155057223, "W_D": 29.804500000000004, "J_D": 393.40339172363286, "W_D_1KI": 0.06232421683043473, "J_D_1KI": 0.00013032622602382335} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..dc99150 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03418374061584473} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([3258, 3666, 785, ..., 592, 2528, 4295]), + values=tensor([0.0745, 0.3346, 0.7433, ..., 0.4561, 0.1450, 0.7729]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.6815, 0.4251, 0.0154, ..., 0.8636, 0.4620, 0.2584]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.03418374061584473 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '307163', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.744239568710327} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
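
The workload itself is simple: a synthetic square matrix with nnz = rows^2 * density uniform-random entries (here 5000^2 * 0.0001 = 2500, matching MATRIX_NNZ), converted to CSR exactly as the echoed spmv.py line shows, then repeatedly multiplied by the random dense vector printed after each matrix. A self-contained sketch under those assumptions (the generator is not shown in this diff, so the coordinate sampling here is illustrative):

    import torch

    size, density = 5000, 0.0001
    nnz = int(size * size * density)            # 2500, matching MATRIX_NNZ
    indices = torch.randint(0, size, (2, nnz))  # assumed: uniform (row, col) positions
    values = torch.rand(nnz)                    # uniform [0, 1) values, as printed
    matrix = torch.sparse_coo_tensor(indices, values, (size, size))
    matrix = matrix.to_sparse_csr().type(torch.float32)  # the line echoed at spmv.py:75
    x = torch.rand(size)                        # the dense vector in the logs
    for _ in range(478217):                     # ITERATIONS, timed as TIME_S
        y = matrix @ x
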
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 2500, 2500, 2500]), + col_indices=tensor([1557, 2371, 1241, ..., 4745, 784, 3444]), + values=tensor([0.6224, 0.1480, 0.3479, ..., 0.3226, 0.4259, 0.8584]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.6292, 0.0071, 0.7726, ..., 0.8443, 0.3847, 0.4326]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 6.744239568710327 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '478217', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.855157613754272} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([ 537, 942, 2250, ..., 4421, 3640, 3689]), + values=tensor([0.2431, 0.3591, 0.7204, ..., 0.2868, 0.0163, 0.2334]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.9258, 0.9006, 0.7252, ..., 0.7255, 0.3779, 0.2202]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.855157613754272 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([ 537, 942, 2250, ..., 4421, 3640, 3689]), + values=tensor([0.2431, 0.3591, 0.7204, ..., 0.2868, 0.0163, 0.2334]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.9258, 0.9006, 0.7252, ..., 0.7255, 0.3779, 0.2202]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.855157613754272 seconds + +[39.25, 38.14, 38.36, 38.18, 38.58, 38.56, 38.4, 38.47, 38.48, 38.68] +[64.87] +13.199462890625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 478217, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.855157613754272, 'TIME_S_1KI': 0.022699229876299402, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.2491577148438, 'W': 64.87} +[39.25, 38.14, 38.36, 38.18, 38.58, 38.56, 38.4, 38.47, 38.48, 38.68, 39.07, 38.8, 38.23, 39.53, 38.54, 38.21, 38.59, 38.49, 41.46, 47.58] +701.31 +35.0655 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 478217, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.855157613754272, 'TIME_S_1KI': 0.022699229876299402, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.2491577148438, 'W': 64.87, 'J_1KI': 1.7905033859416202, 'W_1KI': 0.1356497155057223, 'W_D': 29.804500000000004, 'J_D': 393.40339172363286, 'W_D_1KI': 0.06232421683043473, 'J_D_1KI': 0.00013032622602382335} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..34a1b0c --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 248678, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.619725465774536, "TIME_S_1KI": 0.04270472444596843, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 878.5904247951509, "W": 65.93, "J_1KI": 3.5330444381696444, "W_1KI": 0.2651219649506591, "W_D": 31.137750000000004, "J_D": 414.9450781080723, "W_D_1KI": 0.12521312701565881, "J_D_1KI": 0.0005035150958897} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..c454cf5 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.05426168441772461} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24991, 24997, 25000]), + col_indices=tensor([1287, 1316, 2359, ..., 1751, 2298, 3529]), + values=tensor([0.1773, 0.9664, 0.4947, ..., 0.2806, 0.9364, 0.2474]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5449, 0.4697, 0.1251, ..., 0.6031, 0.3711, 0.9109]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.05426168441772461 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '193506', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.17044973373413} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 10, ..., 24989, 24995, 25000]), + col_indices=tensor([ 563, 1432, 1628, ..., 3910, 4925, 4964]), + values=tensor([0.0779, 0.2473, 0.4860, ..., 0.8752, 0.7145, 0.0936]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5862, 0.8689, 0.7521, ..., 0.3378, 0.8388, 0.0430]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 8.17044973373413 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '248678', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.619725465774536} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 5, ..., 24990, 24993, 25000]), + col_indices=tensor([ 49, 355, 745, ..., 2877, 3597, 4425]), + values=tensor([0.2389, 0.4883, 0.4431, ..., 0.9568, 0.0569, 0.8170]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9647, 0.9839, 0.1030, ..., 0.7979, 0.9168, 0.5702]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.619725465774536 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 5, ..., 24990, 24993, 25000]), + col_indices=tensor([ 49, 355, 745, ..., 2877, 3597, 4425]), + values=tensor([0.2389, 0.4883, 0.4431, ..., 0.9568, 0.0569, 0.8170]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9647, 0.9839, 0.1030, ..., 0.7979, 0.9168, 0.5702]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.619725465774536 seconds + +[39.55, 38.57, 38.5, 38.49, 39.15, 38.5, 38.44, 38.52, 38.74, 38.66] +[65.93] +13.326109886169434 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 248678, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.619725465774536, 'TIME_S_1KI': 0.04270472444596843, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 878.5904247951509, 'W': 65.93} +[39.55, 38.57, 38.5, 38.49, 39.15, 38.5, 38.44, 38.52, 38.74, 38.66, 39.58, 38.56, 38.38, 38.87, 38.31, 38.44, 38.39, 38.74, 38.99, 38.72] +695.845 +34.79225 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 248678, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.619725465774536, 'TIME_S_1KI': 0.04270472444596843, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 878.5904247951509, 'W': 65.93, 'J_1KI': 3.5330444381696444, 'W_1KI': 0.2651219649506591, 'W_D': 31.137750000000004, 'J_D': 414.9450781080723, 'W_D_1KI': 0.12521312701565881, 'J_D_1KI': 0.0005035150958897} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..163b6dc --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 39651, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.074631690979004, "TIME_S_1KI": 0.25408266351363157, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 909.2396620059013, "W": 71.91, "J_1KI": 22.931065093084698, "W_1KI": 1.8135734281607019, "W_D": 21.342999999999996, "J_D": 269.8637478263378, "W_D_1KI": 0.5382714181231241, "J_D_1KI": 0.013575229328973395} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..5c37580 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.26480770111083984} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta 
state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 43, 97, ..., 249898, 249949, + 250000]), + col_indices=tensor([ 46, 106, 224, ..., 4804, 4890, 4986]), + values=tensor([0.9512, 0.1564, 0.8337, ..., 0.0764, 0.6147, 0.8806]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5067, 0.1013, 0.0742, ..., 0.2212, 0.5429, 0.9437]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.26480770111083984 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '39651', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.074631690979004} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 49, 105, ..., 249900, 249947, + 250000]), + col_indices=tensor([ 129, 155, 285, ..., 4713, 4736, 4825]), + values=tensor([0.9050, 0.4779, 0.3101, ..., 0.9077, 0.5485, 0.2382]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7723, 0.0685, 0.7362, ..., 0.7986, 0.1054, 0.6909]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.074631690979004 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 49, 105, ..., 249900, 249947, + 250000]), + col_indices=tensor([ 129, 155, 285, ..., 4713, 4736, 4825]), + values=tensor([0.9050, 0.4779, 0.3101, ..., 0.9077, 0.5485, 0.2382]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7723, 0.0685, 0.7362, ..., 0.7986, 0.1054, 0.6909]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.074631690979004 seconds + +[39.13, 38.4, 39.4, 38.95, 39.01, 38.64, 38.39, 62.19, 64.39, 63.32] +[71.91] +12.644133806228638 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 39651, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.074631690979004, 'TIME_S_1KI': 0.25408266351363157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 909.2396620059013, 'W': 71.91} +[39.13, 38.4, 39.4, 38.95, 39.01, 38.64, 38.39, 62.19, 64.39, 63.32, 68.24, 64.54, 66.6, 65.57, 64.04, 68.19, 66.94, 66.24, 69.2, 70.61] +1011.34 +50.567 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 39651, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.074631690979004, 'TIME_S_1KI': 0.25408266351363157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 909.2396620059013, 'W': 71.91, 'J_1KI': 22.931065093084698, 'W_1KI': 1.8135734281607019, 'W_D': 21.342999999999996, 'J_D': 269.8637478263378, 'W_D_1KI': 0.5382714181231241, 'J_D_1KI': 0.013575229328973395} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..c6f366b --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 8104, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.399449348449707, "TIME_S_1KI": 1.2832489324345642, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 868.1994806170463, "W": 65.85, "J_1KI": 107.132216265677, "W_1KI": 8.125616979269497, "W_D": 30.541749999999993, "J_D": 402.67777505141487, "W_D_1KI": 3.768725320829219, "J_D_1KI": 0.46504507907566867} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..440ed84 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.2955126762390137} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 222, 488, ..., 1249497, + 1249743, 1250000]), + col_indices=tensor([ 0, 1, 24, ..., 4925, 4934, 4978]), + values=tensor([0.4956, 0.3294, 0.5952, ..., 0.4990, 0.9373, 0.9148]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4962, 0.1920, 0.2421, ..., 0.8601, 0.2392, 0.4151]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 1.2955126762390137 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '8104', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.399449348449707} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 238, 495, ..., 1249467, + 1249713, 1250000]), + col_indices=tensor([ 4, 6, 45, ..., 4913, 4952, 4965]), + values=tensor([0.6573, 0.3725, 0.2540, ..., 0.9752, 0.8782, 0.5831]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7880, 0.7423, 0.8544, ..., 0.3557, 0.5396, 0.2540]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.399449348449707 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 238, 495, ..., 1249467, + 1249713, 1250000]), + col_indices=tensor([ 4, 6, 45, ..., 4913, 4952, 4965]), + values=tensor([0.6573, 0.3725, 0.2540, ..., 0.9752, 0.8782, 0.5831]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7880, 0.7423, 0.8544, ..., 0.3557, 0.5396, 0.2540]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.399449348449707 seconds + +[39.35, 38.88, 44.16, 39.47, 38.8, 39.17, 38.52, 38.75, 38.59, 39.07] +[65.85] +13.184502363204956 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8104, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.399449348449707, 'TIME_S_1KI': 1.2832489324345642, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 868.1994806170463, 'W': 65.85} +[39.35, 38.88, 44.16, 39.47, 38.8, 39.17, 38.52, 38.75, 38.59, 39.07, 39.04, 39.89, 38.99, 38.43, 38.88, 38.64, 38.45, 39.36, 39.15, 38.61] +706.165 +35.30825 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8104, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.399449348449707, 'TIME_S_1KI': 1.2832489324345642, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 868.1994806170463, 'W': 65.85, 'J_1KI': 107.132216265677, 'W_1KI': 8.125616979269497, 'W_D': 30.541749999999993, 'J_D': 402.67777505141487, 'W_D_1KI': 3.768725320829219, 'J_D_1KI': 0.46504507907566867} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..f20d05f --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3588, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.469439029693604, "TIME_S_1KI": 2.9179038544296554, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 926.2283216476441, "W": 68.56, "J_1KI": 258.1461320088194, "W_1KI": 19.108138238573023, "W_D": 33.73800000000001, "J_D": 455.7918774175645, "W_D_1KI": 9.403010033444819, "J_D_1KI": 2.620682840982391} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..3e39a46 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.925701141357422} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 485, 1004, ..., 2498982, + 2499482, 2500000]), + col_indices=tensor([ 18, 27, 28, ..., 4963, 4979, 4987]), + values=tensor([0.5744, 0.1591, 0.4039, ..., 0.3146, 0.5536, 0.6554]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8562, 0.0559, 0.5751, ..., 0.9013, 0.4689, 0.3374]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 2.925701141357422 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3588', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.469439029693604} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 486, 1006, ..., 2498987, + 2499524, 2500000]), + col_indices=tensor([ 6, 12, 25, ..., 4979, 4985, 4986]), + values=tensor([0.9526, 0.5714, 0.7457, ..., 0.5995, 0.2741, 0.0768]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.9940, 0.0288, 0.0030, ..., 0.3299, 0.0903, 0.2227]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.469439029693604 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 486, 1006, ..., 2498987, + 2499524, 2500000]), + col_indices=tensor([ 6, 12, 25, ..., 4979, 4985, 4986]), + values=tensor([0.9526, 0.5714, 0.7457, ..., 0.5995, 0.2741, 0.0768]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.9940, 0.0288, 0.0030, ..., 0.3299, 0.0903, 0.2227]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.469439029693604 seconds + +[39.61, 38.42, 39.72, 38.79, 38.63, 38.61, 38.72, 38.24, 38.41, 38.4] +[68.56] +13.509747982025146 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3588, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.469439029693604, 'TIME_S_1KI': 2.9179038544296554, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 926.2283216476441, 'W': 68.56} +[39.61, 38.42, 39.72, 38.79, 38.63, 38.61, 38.72, 38.24, 38.41, 38.4, 39.84, 38.26, 38.74, 38.59, 38.81, 38.31, 38.94, 38.63, 38.58, 38.23] +696.4399999999999 +34.821999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3588, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.469439029693604, 'TIME_S_1KI': 2.9179038544296554, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 926.2283216476441, 'W': 68.56, 'J_1KI': 258.1461320088194, 'W_1KI': 19.108138238573023, 'W_D': 33.73800000000001, 'J_D': 455.7918774175645, 'W_D_1KI': 9.403010033444819, 'J_D_1KI': 2.620682840982391} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..d170a6b --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 565598, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.406220436096191, "TIME_S_1KI": 0.018398616041952396, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 847.5254345631599, "W": 64.13, "J_1KI": 1.498459037272338, "W_1KI": 0.11338441790812556, "W_D": 29.180249999999987, "J_D": 385.6386100407241, "W_D_1KI": 0.05159185499241509, "J_D_1KI": 9.121647352433192e-05} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..4a0b8d8 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.055680274963378906} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4927, 850, 790, 511, 1, 4275, 3659, 3202, 4099, + 3346, 1589, 716, 4620, 4989, 3861, 2882, 2487, 356, + 3163, 4196, 2032, 713, 507, 4615, 4269, 4035, 1320, + 655, 4926, 1128, 2992, 3058, 2439, 4007, 3555, 1710, + 2353, 655, 2875, 397, 2586, 4948, 858, 1089, 783, + 1767, 1975, 2378, 3541, 1407, 868, 4760, 4954, 4948, + 2154, 3756, 192, 4715, 2175, 343, 3413, 855, 3051, + 4256, 4765, 3143, 3774, 3357, 1362, 3915, 3187, 3177, + 3730, 4948, 4331, 2972, 3797, 963, 1487, 1791, 3014, + 3104, 4150, 4779, 2304, 1176, 1597, 3268, 4290, 3867, + 1778, 4097, 4190, 1835, 2167, 1131, 4492, 3907, 2098, + 4204, 4273, 3262, 2220, 4871, 3645, 4702, 4344, 3548, + 398, 3919, 762, 3209, 941, 2587, 4871, 1294, 846, + 4270, 1587, 490, 3776, 205, 4893, 4944, 3389, 1241, + 319, 1205, 149, 2679, 835, 185, 1679, 305, 803, + 3987, 4919, 1049, 2984, 150, 2222, 3548, 4559, 2082, + 773, 3809, 333, 4072, 2819, 773, 1940, 3544, 2429, + 4213, 3874, 3370, 3390, 3737, 2306, 2576, 3944, 3962, + 2700, 3672, 1959, 2924, 1160, 2820, 201, 3021, 1400, + 2786, 3009, 3104, 1799, 1722, 1307, 4435, 3240, 3490, + 3514, 3928, 2870, 339, 280, 3127, 278, 43, 1063, + 3176, 1262, 2341, 4542, 3316, 4835, 2103, 3750, 2839, + 1642, 4880, 4963, 1368, 4924, 2484, 1087, 26, 3186, + 4671, 3346, 1979, 748, 800, 144, 54, 3361, 3955, + 4948, 2768, 2175, 216, 0, 934, 3902, 3054, 854, + 1551, 310, 382, 1750, 779, 4286, 2768, 4550, 2371, + 2027, 2115, 2210, 4053, 3461, 4944, 349, 2236, 2467, + 2141, 1730, 73, 1349, 3773, 2561, 2961]), + values=tensor([0.0052, 0.9685, 0.5552, 0.5554, 0.3769, 0.8417, 0.2484, + 0.8557, 0.2810, 0.1770, 0.3815, 0.5491, 0.2804, 0.7014, + 0.4668, 0.6665, 0.6885, 0.4406, 0.0793, 0.0505, 0.2168, + 0.2768, 0.8793, 0.5292, 0.6124, 0.8331, 0.8520, 0.8953, + 0.2979, 0.9092, 0.1021, 0.9939, 0.8355, 0.6875, 0.6744, + 0.7797, 0.7132, 0.1964, 0.7787, 0.7395, 0.3653, 0.6907, + 0.2135, 0.4345, 0.6550, 0.1169, 0.1290, 0.6211, 0.7886, + 0.4978, 0.8807, 0.4515, 0.8365, 0.6929, 0.0657, 0.2646, + 0.3895, 0.0998, 0.4953, 0.3952, 0.3596, 0.9459, 0.2141, + 0.1718, 0.1717, 0.3607, 0.1199, 0.7175, 0.8124, 0.4557, + 0.0741, 0.2089, 0.8742, 0.1642, 0.0425, 0.9409, 0.3852, + 0.8648, 0.0435, 0.7984, 0.2433, 0.6033, 0.1259, 0.5531, + 0.2437, 0.6326, 0.4382, 0.6680, 0.3511, 0.0596, 0.0831, + 0.8185, 0.6864, 0.6621, 0.0203, 0.2915, 0.7632, 0.4015, + 0.1622, 0.5710, 0.1068, 0.3154, 0.7156, 0.1137, 0.7110, + 0.7922, 0.6817, 0.4208, 0.8226, 0.6751, 0.5470, 0.6580, + 0.9115, 0.2395, 0.8631, 0.8946, 0.8633, 0.9964, 0.1781, + 0.0456, 0.7692, 0.7333, 0.7567, 0.4246, 0.7150, 0.3292, + 0.8102, 0.3763, 0.7077, 0.9596, 0.7799, 0.8995, 0.4237, + 0.8044, 0.0028, 0.6094, 0.0822, 0.3516, 0.1473, 0.3747, + 0.2994, 0.6148, 0.9715, 0.8176, 0.8036, 0.4058, 0.2036, + 0.3753, 0.4509, 0.2117, 0.5735, 0.9721, 0.6964, 0.3733, + 0.2389, 0.5980, 0.7861, 0.1124, 0.7224, 0.2736, 0.1517, + 0.1578, 0.1015, 0.9540, 0.9804, 0.5457, 0.1059, 0.7649, + 0.7606, 0.0359, 0.3684, 0.4744, 0.3881, 0.5669, 0.6894, + 0.8642, 0.1190, 0.1465, 0.4614, 0.1113, 0.6697, 0.9048, + 0.9025, 0.8550, 0.3322, 0.9950, 0.8601, 0.6688, 0.9556, + 0.6649, 0.0390, 0.6075, 0.3304, 0.8947, 0.7252, 0.7691, + 0.7526, 0.8639, 0.4721, 0.9403, 0.4391, 0.6933, 0.1244, + 0.9914, 0.2708, 
0.4335, 0.8597, 0.4714, 0.6817, 0.8948, + 0.1646, 0.6199, 0.1780, 0.7119, 0.3391, 0.9514, 0.4224, + 0.9358, 0.1033, 0.8786, 0.4834, 0.9743, 0.3774, 0.6356, + 0.0241, 0.9866, 0.3267, 0.8949, 0.2494, 0.9412, 0.8442, + 0.7104, 0.1721, 0.4102, 0.7763, 0.4723, 0.0485, 0.1320, + 0.4711, 0.1941, 0.9435, 0.7325, 0.9932, 0.9457, 0.1546, + 0.7522, 0.6262, 0.4856, 0.7356, 0.9269]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4907, 0.7631, 0.4016, ..., 0.1364, 0.7839, 0.0874]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.055680274963378906 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '188576', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.5007991790771484} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1945, 1023, 4059, 2482, 4205, 303, 4777, 854, 2860, + 3128, 1003, 4735, 2788, 4977, 4888, 1184, 1747, 1500, + 1488, 4664, 4234, 267, 2917, 1657, 2512, 4827, 4561, + 702, 1237, 3411, 3165, 543, 1337, 83, 2870, 335, + 3814, 4999, 149, 4519, 2422, 4719, 798, 1942, 1622, + 3623, 4934, 3536, 2679, 1799, 4397, 3267, 2356, 3096, + 939, 547, 3544, 3068, 871, 1836, 3638, 2030, 3514, + 3175, 329, 4905, 2001, 311, 2973, 4563, 1817, 1048, + 929, 4023, 2988, 4454, 1785, 1847, 1514, 4852, 2649, + 3063, 1763, 4293, 987, 4530, 3247, 562, 3333, 1092, + 3107, 2490, 531, 4875, 990, 2781, 1158, 1668, 810, + 4571, 1453, 4830, 4987, 542, 1478, 3139, 2797, 4337, + 4005, 1729, 1210, 1760, 2876, 492, 717, 4559, 1380, + 2637, 1249, 2077, 2637, 1153, 3843, 4108, 3845, 3286, + 4892, 4744, 3227, 2586, 83, 679, 2941, 1087, 894, + 781, 3420, 957, 2881, 2363, 2348, 2617, 2659, 1938, + 1995, 162, 900, 4007, 2523, 4470, 4394, 2657, 1289, + 3860, 3369, 1091, 538, 136, 430, 3091, 862, 1648, + 643, 490, 4863, 2809, 1365, 1101, 1331, 516, 1710, + 2693, 2751, 328, 677, 727, 1218, 3858, 2408, 4041, + 4770, 1765, 2463, 3676, 4301, 3125, 2410, 3828, 4357, + 3454, 2697, 3913, 3850, 2386, 3319, 2739, 967, 2681, + 2619, 1855, 848, 4820, 42, 3478, 2615, 4379, 3969, + 318, 169, 4793, 3405, 1411, 1550, 4436, 2892, 2747, + 2076, 4350, 3765, 3931, 2191, 4279, 3507, 1647, 3640, + 24, 2376, 2290, 3244, 118, 4586, 1505, 1122, 1321, + 3378, 2663, 1121, 2193, 4996, 4050, 1149, 1171, 674, + 98, 868, 2491, 2360, 3984, 4243, 3717]), + values=tensor([0.8923, 0.2170, 0.4055, 0.4662, 0.6388, 0.1130, 0.3558, + 0.8111, 0.3477, 0.3800, 0.1079, 0.8330, 0.9521, 0.2703, + 0.3856, 0.0011, 0.5451, 0.8270, 0.6026, 0.6871, 0.2987, + 0.0297, 0.9583, 0.5169, 0.4017, 0.2171, 0.4756, 0.5607, + 0.0472, 0.1280, 0.3544, 0.8497, 0.3044, 0.7975, 0.4038, + 0.2219, 0.0782, 0.3625, 0.4265, 0.7585, 0.5674, 0.8855, + 0.8283, 0.3415, 0.0517, 0.5793, 0.6358, 0.0955, 0.8953, + 0.4821, 0.5628, 0.3527, 0.5347, 0.9985, 0.4438, 0.9458, + 0.8619, 0.6814, 0.4148, 0.2273, 0.3882, 0.1003, 
0.0543, + 0.4150, 0.9185, 0.0166, 0.8297, 0.5190, 0.1538, 0.6141, + 0.2637, 0.0598, 0.8180, 0.7469, 0.0453, 0.5538, 0.8701, + 0.6469, 0.0982, 0.7176, 0.0465, 0.3670, 0.5104, 0.4937, + 0.2148, 0.7740, 0.3290, 0.8672, 0.1889, 0.4020, 0.0735, + 0.7646, 0.0051, 0.2270, 0.0781, 0.9331, 0.9272, 0.2719, + 0.1297, 0.3201, 0.5551, 0.7162, 0.8369, 0.6662, 0.1046, + 0.5488, 0.7113, 0.7847, 0.2788, 0.8185, 0.6566, 0.4871, + 0.5299, 0.6218, 0.8570, 0.1819, 0.5175, 0.1532, 0.4515, + 0.3371, 0.8231, 0.7575, 0.8237, 0.2542, 0.7977, 0.3121, + 0.6201, 0.3327, 0.3804, 0.3314, 0.3106, 0.6784, 0.7520, + 0.4798, 0.5547, 0.5647, 0.4448, 0.9580, 0.7896, 0.4903, + 0.1080, 0.8992, 0.5980, 0.8970, 0.6636, 0.7995, 0.6348, + 0.1663, 0.2370, 0.3831, 0.4667, 0.7285, 0.6074, 0.1379, + 0.1650, 0.4365, 0.1346, 0.0493, 0.9094, 0.8343, 0.5503, + 0.6878, 0.9726, 0.3666, 0.9441, 0.6828, 0.4331, 0.9621, + 0.0173, 0.9911, 0.0894, 0.4748, 0.0217, 0.1933, 0.3591, + 0.5607, 0.7065, 0.9013, 0.5608, 0.5400, 0.0070, 0.9469, + 0.6275, 0.4975, 0.8745, 0.1132, 0.5527, 0.6696, 0.7603, + 0.2454, 0.5447, 0.0979, 0.6116, 0.0408, 0.5683, 0.5779, + 0.1881, 0.0095, 0.3924, 0.6268, 0.9119, 0.2320, 0.0019, + 0.0175, 0.8569, 0.7934, 0.3311, 0.4757, 0.7819, 0.0089, + 0.0688, 0.2934, 0.7037, 0.0307, 0.4797, 0.2771, 0.4270, + 0.8332, 0.6054, 0.8327, 0.8285, 0.2236, 0.0301, 0.9022, + 0.2426, 0.5397, 0.0668, 0.3464, 0.5399, 0.2689, 0.4924, + 0.7416, 0.9953, 0.1583, 0.4326, 0.2863, 0.4395, 0.4620, + 0.4220, 0.0019, 0.8210, 0.7450, 0.1671, 0.2691, 0.2129, + 0.6046, 0.1184, 0.5733, 0.5791, 0.6764]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1655, 0.0894, 0.3335, ..., 0.5896, 0.4748, 0.7424]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 3.5007991790771484 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '565598', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.406220436096191} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), + col_indices=tensor([3711, 2509, 1480, 2246, 4155, 2306, 315, 3219, 781, + 3895, 3381, 2148, 1468, 1317, 2648, 3838, 486, 2691, + 4269, 1833, 4130, 2494, 2935, 4534, 1404, 631, 2237, + 3119, 2408, 4857, 3452, 3551, 652, 1979, 294, 2907, + 4341, 963, 1166, 1723, 2311, 2016, 4067, 2454, 3108, + 4422, 594, 1090, 1798, 1231, 1189, 3083, 3007, 2134, + 3681, 526, 4251, 1258, 2420, 4062, 326, 2947, 386, + 3623, 4002, 1015, 2488, 2914, 344, 749, 2046, 3369, + 2183, 4810, 804, 4709, 4216, 4774, 3285, 1736, 1631, + 1116, 2085, 4390, 2715, 1633, 1339, 4203, 1468, 3776, + 4650, 1964, 1644, 3484, 556, 1113, 359, 2615, 4829, + 4748, 1322, 159, 1685, 3154, 4693, 4031, 1252, 1027, + 678, 4884, 997, 1416, 284, 2922, 2849, 4079, 606, + 470, 1943, 1148, 4302, 4930, 4799, 1057, 474, 2030, + 3336, 862, 2916, 4504, 1767, 3103, 2022, 3927, 3702, + 2754, 2164, 4564, 2862, 341, 1369, 1305, 4261, 2181, + 1646, 3936, 3010, 930, 4647, 2915, 4405, 3874, 1229, + 1875, 855, 1323, 963, 2816, 4148, 4829, 4066, 4913, + 691, 4066, 1415, 2632, 3157, 1676, 346, 4763, 246, + 2345, 1525, 4678, 2542, 2753, 3445, 3912, 2714, 1361, + 733, 3308, 420, 1698, 1705, 3596, 4607, 2749, 2452, + 4692, 611, 3476, 336, 999, 2085, 3920, 2039, 3357, + 4270, 3263, 3475, 3737, 446, 1786, 2984, 2510, 2736, + 3086, 1080, 3428, 4087, 375, 2103, 1319, 4228, 2727, + 4839, 645, 2259, 3905, 3083, 2174, 1253, 1258, 2465, + 3785, 2824, 24, 1918, 2335, 918, 1175, 3575, 2352, + 4164, 2100, 1603, 715, 4639, 1853, 3257, 1572, 4514, + 2943, 1003, 4748, 1038, 1012, 3061, 294]), + values=tensor([0.0072, 0.2895, 0.9639, 0.0057, 0.4191, 0.2094, 0.7103, + 0.8218, 0.3375, 0.5039, 0.5062, 0.5584, 0.5972, 0.9352, + 0.8333, 0.7188, 0.6342, 0.9555, 0.9103, 0.1687, 0.2984, + 0.7732, 0.0449, 0.0772, 0.1352, 0.5023, 0.0443, 0.4171, + 0.2148, 0.7142, 0.2678, 0.2649, 0.5734, 0.2586, 0.1803, + 0.3367, 0.7155, 0.6815, 0.6287, 0.8390, 0.5032, 0.1992, + 0.5162, 0.5707, 0.0670, 0.5923, 0.5384, 0.7500, 0.0960, + 0.4905, 0.7846, 0.7390, 0.3348, 0.9396, 0.2679, 0.8099, + 0.4907, 0.0176, 0.1919, 0.5036, 0.7682, 0.7675, 0.5778, + 0.9394, 0.8838, 0.1647, 0.2045, 0.3204, 0.5816, 0.4877, + 0.4316, 0.5907, 0.3880, 0.5556, 0.6079, 0.5805, 0.9477, + 0.7717, 0.2301, 0.4363, 0.4192, 0.7264, 0.9246, 0.5163, + 0.0957, 0.1670, 0.3706, 0.2621, 0.2557, 0.7081, 0.3520, + 0.9207, 0.5713, 0.9991, 0.2774, 0.9953, 0.3693, 0.6174, + 0.8286, 0.4524, 0.9605, 0.1877, 0.9322, 0.0179, 0.6890, + 0.8811, 0.8437, 0.1818, 0.1680, 0.0986, 0.7979, 0.9912, + 0.8202, 0.1132, 0.4257, 0.5766, 0.6866, 0.1937, 0.7442, + 0.9210, 0.2915, 0.9278, 0.6093, 0.0128, 0.7291, 0.8036, + 0.5824, 0.8528, 0.6888, 0.3925, 0.4263, 0.3416, 0.9010, + 0.2543, 0.7049, 0.8368, 0.2533, 0.1239, 0.2556, 0.3482, + 0.6122, 0.3407, 0.8598, 0.6533, 0.0993, 0.8400, 0.5464, + 0.2659, 0.0791, 0.9360, 0.6384, 0.4202, 0.5451, 0.6770, + 0.9558, 0.2536, 0.5924, 0.5367, 0.4377, 0.3759, 0.9344, + 0.0785, 0.9178, 0.5703, 0.2621, 0.7840, 0.6650, 0.5173, + 0.7316, 0.8675, 0.0573, 0.5592, 0.5656, 0.1368, 0.7342, + 0.4891, 0.5212, 0.5980, 0.9850, 0.3144, 0.9416, 0.3586, + 0.5874, 0.8863, 0.8557, 0.4322, 0.3167, 0.3279, 0.7906, + 0.9595, 0.6426, 0.5182, 0.3380, 0.6725, 0.1898, 0.5553, + 0.6660, 0.7693, 0.0543, 0.1495, 0.4661, 0.0013, 0.2189, + 0.2756, 0.4230, 0.3033, 0.9296, 0.0600, 0.3160, 0.8967, + 0.7981, 0.0839, 0.1133, 0.3382, 0.5864, 0.5344, 0.5684, + 0.8353, 0.4735, 0.5909, 0.0547, 0.2196, 0.1029, 0.2516, + 0.4455, 0.6775, 0.1108, 0.8486, 
0.1605, 0.0632, 0.7729, + 0.1033, 0.7416, 0.1100, 0.7509, 0.4420, 0.1639, 0.2794, + 0.8260, 0.8724, 0.3230, 0.8818, 0.5434, 0.6423, 0.5673, + 0.7089, 0.6119, 0.9976, 0.0416, 0.2792]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.0013, 0.0858, 0.8984, ..., 0.5676, 0.8612, 0.3338]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.406220436096191 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), + col_indices=tensor([3711, 2509, 1480, 2246, 4155, 2306, 315, 3219, 781, + 3895, 3381, 2148, 1468, 1317, 2648, 3838, 486, 2691, + 4269, 1833, 4130, 2494, 2935, 4534, 1404, 631, 2237, + 3119, 2408, 4857, 3452, 3551, 652, 1979, 294, 2907, + 4341, 963, 1166, 1723, 2311, 2016, 4067, 2454, 3108, + 4422, 594, 1090, 1798, 1231, 1189, 3083, 3007, 2134, + 3681, 526, 4251, 1258, 2420, 4062, 326, 2947, 386, + 3623, 4002, 1015, 2488, 2914, 344, 749, 2046, 3369, + 2183, 4810, 804, 4709, 4216, 4774, 3285, 1736, 1631, + 1116, 2085, 4390, 2715, 1633, 1339, 4203, 1468, 3776, + 4650, 1964, 1644, 3484, 556, 1113, 359, 2615, 4829, + 4748, 1322, 159, 1685, 3154, 4693, 4031, 1252, 1027, + 678, 4884, 997, 1416, 284, 2922, 2849, 4079, 606, + 470, 1943, 1148, 4302, 4930, 4799, 1057, 474, 2030, + 3336, 862, 2916, 4504, 1767, 3103, 2022, 3927, 3702, + 2754, 2164, 4564, 2862, 341, 1369, 1305, 4261, 2181, + 1646, 3936, 3010, 930, 4647, 2915, 4405, 3874, 1229, + 1875, 855, 1323, 963, 2816, 4148, 4829, 4066, 4913, + 691, 4066, 1415, 2632, 3157, 1676, 346, 4763, 246, + 2345, 1525, 4678, 2542, 2753, 3445, 3912, 2714, 1361, + 733, 3308, 420, 1698, 1705, 3596, 4607, 2749, 2452, + 4692, 611, 3476, 336, 999, 2085, 3920, 2039, 3357, + 4270, 3263, 3475, 3737, 446, 1786, 2984, 2510, 2736, + 3086, 1080, 3428, 4087, 375, 2103, 1319, 4228, 2727, + 4839, 645, 2259, 3905, 3083, 2174, 1253, 1258, 2465, + 3785, 2824, 24, 1918, 2335, 918, 1175, 3575, 2352, + 4164, 2100, 1603, 715, 4639, 1853, 3257, 1572, 4514, + 2943, 1003, 4748, 1038, 1012, 3061, 294]), + values=tensor([0.0072, 0.2895, 0.9639, 0.0057, 0.4191, 0.2094, 0.7103, + 0.8218, 0.3375, 0.5039, 0.5062, 0.5584, 0.5972, 0.9352, + 0.8333, 0.7188, 0.6342, 0.9555, 0.9103, 0.1687, 0.2984, + 0.7732, 0.0449, 0.0772, 0.1352, 0.5023, 0.0443, 0.4171, + 0.2148, 0.7142, 0.2678, 0.2649, 0.5734, 0.2586, 0.1803, + 0.3367, 0.7155, 0.6815, 0.6287, 0.8390, 0.5032, 0.1992, + 0.5162, 0.5707, 0.0670, 0.5923, 0.5384, 0.7500, 0.0960, + 0.4905, 0.7846, 0.7390, 0.3348, 0.9396, 0.2679, 0.8099, + 0.4907, 0.0176, 0.1919, 0.5036, 0.7682, 0.7675, 0.5778, + 0.9394, 0.8838, 0.1647, 0.2045, 0.3204, 0.5816, 0.4877, + 0.4316, 0.5907, 0.3880, 0.5556, 0.6079, 0.5805, 0.9477, + 0.7717, 0.2301, 0.4363, 0.4192, 0.7264, 0.9246, 0.5163, + 0.0957, 0.1670, 0.3706, 0.2621, 0.2557, 0.7081, 0.3520, + 0.9207, 0.5713, 0.9991, 0.2774, 0.9953, 0.3693, 0.6174, + 0.8286, 0.4524, 0.9605, 0.1877, 0.9322, 0.0179, 0.6890, + 0.8811, 0.8437, 0.1818, 0.1680, 0.0986, 0.7979, 0.9912, + 0.8202, 0.1132, 0.4257, 0.5766, 0.6866, 0.1937, 0.7442, + 0.9210, 0.2915, 0.9278, 0.6093, 0.0128, 0.7291, 0.8036, + 0.5824, 0.8528, 0.6888, 0.3925, 0.4263, 0.3416, 0.9010, + 
0.2543, 0.7049, 0.8368, 0.2533, 0.1239, 0.2556, 0.3482, + 0.6122, 0.3407, 0.8598, 0.6533, 0.0993, 0.8400, 0.5464, + 0.2659, 0.0791, 0.9360, 0.6384, 0.4202, 0.5451, 0.6770, + 0.9558, 0.2536, 0.5924, 0.5367, 0.4377, 0.3759, 0.9344, + 0.0785, 0.9178, 0.5703, 0.2621, 0.7840, 0.6650, 0.5173, + 0.7316, 0.8675, 0.0573, 0.5592, 0.5656, 0.1368, 0.7342, + 0.4891, 0.5212, 0.5980, 0.9850, 0.3144, 0.9416, 0.3586, + 0.5874, 0.8863, 0.8557, 0.4322, 0.3167, 0.3279, 0.7906, + 0.9595, 0.6426, 0.5182, 0.3380, 0.6725, 0.1898, 0.5553, + 0.6660, 0.7693, 0.0543, 0.1495, 0.4661, 0.0013, 0.2189, + 0.2756, 0.4230, 0.3033, 0.9296, 0.0600, 0.3160, 0.8967, + 0.7981, 0.0839, 0.1133, 0.3382, 0.5864, 0.5344, 0.5684, + 0.8353, 0.4735, 0.5909, 0.0547, 0.2196, 0.1029, 0.2516, + 0.4455, 0.6775, 0.1108, 0.8486, 0.1605, 0.0632, 0.7729, + 0.1033, 0.7416, 0.1100, 0.7509, 0.4420, 0.1639, 0.2794, + 0.8260, 0.8724, 0.3230, 0.8818, 0.5434, 0.6423, 0.5673, + 0.7089, 0.6119, 0.9976, 0.0416, 0.2792]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.0013, 0.0858, 0.8984, ..., 0.5676, 0.8612, 0.3338]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.406220436096191 seconds + +[39.5, 43.3, 38.37, 38.43, 38.66, 38.52, 38.75, 38.13, 38.31, 39.28] +[64.13] +13.215740442276001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 565598, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.406220436096191, 'TIME_S_1KI': 0.018398616041952396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.5254345631599, 'W': 64.13} +[39.5, 43.3, 38.37, 38.43, 38.66, 38.52, 38.75, 38.13, 38.31, 39.28, 40.56, 38.94, 38.24, 38.25, 38.39, 38.16, 38.85, 38.41, 38.49, 38.25] +698.9950000000001 +34.94975000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 565598, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.406220436096191, 'TIME_S_1KI': 0.018398616041952396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.5254345631599, 'W': 64.13, 'J_1KI': 1.498459037272338, 'W_1KI': 0.11338441790812556, 'W_D': 29.180249999999987, 'J_D': 385.6386100407241, 'W_D_1KI': 0.05159185499241509, 'J_D_1KI': 9.121647352433192e-05} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json index 2a06eb8..027433b 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3646, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.427528619766235, "TIME_S_1KI": 2.8599913932436194, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 692.8558839225768, "W": 47.94, "J_1KI": 190.03178385150215, "W_1KI": 13.14865606143719, "W_D": 31.4125, "J_D": 453.99114421606066, "W_D_1KI": 8.615606143719145, "J_D_1KI": 2.3630296609213235} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3626, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 
100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.40371823310852, "TIME_S_1KI": 2.869199733344876, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 756.9219121170044, "W": 52.54, "J_1KI": 208.74845894015564, "W_1KI": 14.489795918367347, "W_D": 36.50025, "J_D": 525.8439098353386, "W_D_1KI": 10.066257584114727, "J_D_1KI": 2.776132814151883} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output index cc7a205..8a43e29 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.879791498184204} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.89510178565979} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 23, ..., 999979, - 999989, 1000000]), - col_indices=tensor([ 5015, 13201, 16372, ..., 56043, 65196, 77096]), - values=tensor([0.8877, 0.8022, 0.3967, ..., 0.7199, 0.8399, 0.8151]), +tensor(crow_indices=tensor([ 0, 8, 17, ..., 999968, + 999983, 1000000]), + col_indices=tensor([23348, 35658, 56723, ..., 82423, 86979, 88187]), + values=tensor([0.8917, 0.1559, 0.5748, ..., 0.5915, 0.7647, 0.8715]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6595, 0.9245, 0.4951, ..., 0.4587, 0.0765, 0.0892]) +tensor([0.4707, 0.9474, 0.3412, ..., 0.5588, 0.8812, 0.4153]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 2.879791498184204 seconds +Time: 2.89510178565979 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3646', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.427528619766235} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3626', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.40371823310852} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 24, ..., 999984, - 999992, 1000000]), - col_indices=tensor([12724, 24596, 29019, ..., 72798, 83516, 98300]), - values=tensor([0.5582, 0.8508, 0.8777, ..., 0.7164, 0.8705, 0.2253]), +tensor(crow_indices=tensor([ 0, 5, 15, ..., 999979, + 999990, 1000000]), + col_indices=tensor([16760, 54124, 62778, ..., 86983, 90495, 98787]), + values=tensor([0.0638, 0.0650, 0.2338, ..., 0.3776, 0.7465, 0.0262]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8849, 0.3552, 0.8045, ..., 0.9875, 0.5127, 0.0107]) +tensor([0.7059, 0.4263, 0.8303, ..., 0.6514, 0.5791, 0.5612]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.427528619766235 seconds +Time: 10.40371823310852 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 24, ..., 999984, - 999992, 1000000]), - col_indices=tensor([12724, 24596, 29019, ..., 72798, 83516, 98300]), - values=tensor([0.5582, 0.8508, 0.8777, ..., 0.7164, 0.8705, 0.2253]), +tensor(crow_indices=tensor([ 0, 5, 15, ..., 999979, + 999990, 1000000]), + col_indices=tensor([16760, 54124, 62778, ..., 86983, 90495, 98787]), + values=tensor([0.0638, 0.0650, 0.2338, ..., 0.3776, 0.7465, 0.0262]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8849, 0.3552, 0.8045, ..., 0.9875, 0.5127, 0.0107]) +tensor([0.7059, 0.4263, 0.8303, ..., 0.6514, 0.5791, 0.5612]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.427528619766235 seconds +Time: 10.40371823310852 seconds -[18.35, 17.89, 18.28, 18.14, 17.94, 18.1, 18.09, 18.19, 18.06, 18.09] -[47.94] -14.452563285827637 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.427528619766235, 'TIME_S_1KI': 2.8599913932436194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.8558839225768, 'W': 47.94} -[18.35, 17.89, 18.28, 18.14, 17.94, 18.1, 18.09, 18.19, 18.06, 18.09, 18.01, 18.32, 17.86, 17.98, 18.22, 18.07, 19.34, 21.72, 17.99, 18.27] -330.54999999999995 -16.527499999999996 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.427528619766235, 'TIME_S_1KI': 2.8599913932436194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.8558839225768, 'W': 47.94, 'J_1KI': 190.03178385150215, 'W_1KI': 13.14865606143719, 'W_D': 31.4125, 'J_D': 453.99114421606066, 'W_D_1KI': 8.615606143719145, 'J_D_1KI': 2.3630296609213235} +[18.45, 17.61, 17.77, 17.55, 17.61, 17.99, 17.58, 18.4, 17.81, 17.62] +[52.54] +14.406583786010742 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.40371823310852, 'TIME_S_1KI': 2.869199733344876, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 756.9219121170044, 'W': 52.54} +[18.45, 17.61, 17.77, 17.55, 17.61, 17.99, 17.58, 18.4, 17.81, 17.62, 18.49, 17.87, 17.62, 17.77, 17.72, 17.81, 18.01, 17.57, 17.69, 18.27] +320.79499999999996 +16.039749999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.40371823310852, 'TIME_S_1KI': 2.869199733344876, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 756.9219121170044, 'W': 52.54, 'J_1KI': 208.74845894015564, 'W_1KI': 14.489795918367347, 'W_D': 36.50025, 'J_D': 525.8439098353386, 'W_D_1KI': 10.066257584114727, 'J_D_1KI': 2.776132814151883} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..46cf4d4 
--- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 27.417505741119385, "TIME_S_1KI": 27.417505741119385, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1870.535600104332, "W": 53.17, "J_1KI": 1870.535600104332, "W_1KI": 53.17, "W_D": 36.779250000000005, "J_D": 1293.9043910125495, "W_D_1KI": 36.779250000000005, "J_D_1KI": 36.779250000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..523884d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 27.417505741119385} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 87, 206, ..., 9999814, + 9999907, 10000000]), + col_indices=tensor([ 430, 1206, 1283, ..., 96095, 96254, 99884]), + values=tensor([0.0855, 0.2486, 0.3160, ..., 0.5781, 0.8085, 0.1274]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4876, 0.8099, 0.9530, ..., 0.3051, 0.4863, 0.6986]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 27.417505741119385 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 87, 206, ..., 9999814, + 9999907, 10000000]), + col_indices=tensor([ 430, 1206, 1283, ..., 96095, 96254, 99884]), + values=tensor([0.0855, 0.2486, 0.3160, ..., 0.5781, 0.8085, 0.1274]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4876, 0.8099, 0.9530, ..., 0.3051, 0.4863, 0.6986]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 27.417505741119385 seconds + +[18.51, 17.88, 18.06, 17.74, 17.69, 18.37, 18.17, 17.77, 18.14, 17.72] +[53.17] +35.18028211593628 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 27.417505741119385, 'TIME_S_1KI': 27.417505741119385, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1870.535600104332, 'W': 53.17} +[18.51, 17.88, 18.06, 17.74, 17.69, 18.37, 18.17, 17.77, 18.14, 17.72, 18.92, 17.72, 17.91, 22.37, 18.39, 17.62, 17.83, 17.88, 17.9, 17.6] +327.815 +16.39075 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 27.417505741119385, 'TIME_S_1KI': 27.417505741119385, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1870.535600104332, 'W': 53.17, 'J_1KI': 1870.535600104332, 'W_1KI': 53.17, 'W_D': 36.779250000000005, 'J_D': 1293.9043910125495, 'W_D_1KI': 36.779250000000005, 'J_D_1KI': 36.779250000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json index 1871367..fb45506 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 8006, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.406643390655518, "TIME_S_1KI": 1.2998555321828025, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 663.0740174865723, "W": 46.720000000000006, "J_1KI": 82.82213558413343, "W_1KI": 5.835623282538097, "W_D": 30.276250000000005, "J_D": 429.69594867140063, "W_D_1KI": 3.7816949787659264, "J_D_1KI": 0.47235760414263384} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 7957, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.378219842910767, "TIME_S_1KI": 1.3042880285171252, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 722.9498359966278, "W": 51.18, "J_1KI": 90.85708633864871, "W_1KI": 6.432072389091366, "W_D": 34.9585, "J_D": 493.81089960312846, "W_D_1KI": 4.3934271710443635, "J_D_1KI": 0.5521461821093834} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output index b29167b..f2950b1 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.3114714622497559} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.319572925567627} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 99998, 99998, +tensor(crow_indices=tensor([ 0, 3, 4, ..., 99998, 100000, 100000]), - col_indices=tensor([15714, 63018, 47083, ..., 95898, 11433, 73543]), - values=tensor([0.8298, 0.7556, 0.0451, ..., 0.9622, 0.2125, 0.4932]), + col_indices=tensor([ 8050, 18600, 47626, ..., 72573, 7071, 11396]), + values=tensor([0.6679, 0.8144, 0.2788, ..., 0.2480, 0.1170, 0.9852]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8440, 0.1023, 0.7738, ..., 0.5206, 0.7518, 0.6360]) +tensor([0.3322, 0.6851, 0.8140, ..., 0.1719, 0.4686, 0.0560]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 1.3114714622497559 seconds +Time: 1.319572925567627 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8006', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.406643390655518} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7957', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.378219842910767} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 1, 5, ..., 99999, 99999, 100000]), - col_indices=tensor([38549, 23010, 96204, ..., 15384, 78128, 94145]), - values=tensor([0.9276, 0.2040, 0.0329, ..., 0.0402, 0.0179, 0.0490]), + col_indices=tensor([79139, 34438, 57240, ..., 99522, 68399, 1834]), + values=tensor([0.8717, 0.0754, 0.3550, ..., 0.4586, 0.3508, 0.4372]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1445, 0.8456, 0.7445, ..., 0.5274, 0.1855, 0.5940]) +tensor([0.1033, 0.1471, 0.4199, ..., 0.6623, 0.5752, 0.0388]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.406643390655518 seconds +Time: 10.378219842910767 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 1, 5, ..., 99999, 99999, 100000]), - col_indices=tensor([38549, 23010, 96204, ..., 15384, 78128, 94145]), - values=tensor([0.9276, 0.2040, 0.0329, ..., 0.0402, 0.0179, 0.0490]), + col_indices=tensor([79139, 34438, 57240, ..., 99522, 68399, 1834]), + values=tensor([0.8717, 0.0754, 0.3550, ..., 0.4586, 0.3508, 0.4372]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1445, 0.8456, 0.7445, ..., 0.5274, 0.1855, 0.5940]) +tensor([0.1033, 0.1471, 0.4199, ..., 0.6623, 0.5752, 0.0388]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.406643390655518 seconds +Time: 10.378219842910767 seconds -[20.45, 19.81, 17.91, 18.51, 18.24, 17.88, 18.26, 18.12, 17.9, 18.59] -[46.72] -14.192508935928345 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8006, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.406643390655518, 'TIME_S_1KI': 1.2998555321828025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.0740174865723, 'W': 46.720000000000006} -[20.45, 19.81, 17.91, 18.51, 18.24, 17.88, 18.26, 18.12, 17.9, 18.59, 18.4, 18.11, 18.06, 18.12, 17.9, 18.34, 18.04, 17.94, 17.82, 18.39] -328.875 -16.44375 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8006, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.406643390655518, 'TIME_S_1KI': 1.2998555321828025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.0740174865723, 'W': 46.720000000000006, 'J_1KI': 82.82213558413343, 'W_1KI': 5.835623282538097, 'W_D': 30.276250000000005, 'J_D': 429.69594867140063, 'W_D_1KI': 3.7816949787659264, 'J_D_1KI': 0.47235760414263384} +[19.98, 17.81, 17.8, 18.03, 17.89, 17.86, 17.65, 18.02, 17.79, 17.59] +[51.18] +14.12563180923462 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 7957, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 
'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.378219842910767, 'TIME_S_1KI': 1.3042880285171252, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 722.9498359966278, 'W': 51.18} +[19.98, 17.81, 17.8, 18.03, 17.89, 17.86, 17.65, 18.02, 17.79, 17.59, 18.5, 18.06, 17.99, 17.58, 17.86, 17.88, 17.85, 17.69, 17.49, 22.29] +324.43 +16.2215 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 7957, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.378219842910767, 'TIME_S_1KI': 1.3042880285171252, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 722.9498359966278, 'W': 51.18, 'J_1KI': 90.85708633864871, 'W_1KI': 6.432072389091366, 'W_D': 34.9585, 'J_D': 493.81089960312846, 'W_D_1KI': 4.3934271710443635, 'J_D_1KI': 0.5521461821093834} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json index eaa436a..641470f 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 85057, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.436183214187622, "TIME_S_1KI": 0.12269634732223829, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 663.123476600647, "W": 46.56, "J_1KI": 7.796224609387199, "W_1KI": 0.5473976274733414, "W_D": 8.939750000000004, "J_D": 127.32298324614769, "W_D_1KI": 0.10510304854391765, "J_D_1KI": 0.0012356778224475076} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 83764, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.409894704818726, "TIME_S_1KI": 0.12427647563175977, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 711.6688519239426, "W": 50.42, "J_1KI": 8.49611828379665, "W_1KI": 0.6019292297407001, "W_D": 34.06175, "J_D": 480.7752185049654, "W_D_1KI": 0.40663948713050957, "J_D_1KI": 0.004854585348485144} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output index 0f19044..168b413 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.138319730758667} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.13988041877746582} 
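The command lines above show the runner's calibration pattern: a 1000-iteration probe measures TIME_S, the iteration count is rescaled toward a roughly 10.5-second target, and the benchmark reruns while the measured time still falls short of the 10-second BASELINE_TIME_S. The logged numbers fit this exactly: 10.5 / 1.319572925567627 * 1000 = 7957 above, and in the next file 10.5 / 0.13988041877746582 * 1000 = 75064 followed by 10.5 * 75064 / 9.409368753433228 = 83764. A minimal sketch of that loop, inferred from these logs rather than copied from batch.py, with run_spmv standing in as a hypothetical helper:

def calibrate(run_spmv, iterations=1000, target_s=10.5, baseline_s=10.0):
    # run_spmv(n) -> measured TIME_S for n SpMV iterations (hypothetical helper)
    time_s = run_spmv(iterations)
    while time_s < baseline_s:
        # rescale so the next run should last about target_s seconds
        iterations = int(target_s * iterations / time_s)
        time_s = run_spmv(iterations)
    return iterations, time_s

Reproducing the sequences in these logs: calibrate starting from 1.319572925567627 s at 1000 iterations stops after one rescale at 7957; starting from 0.13988041877746582 s it rescales to 75064, measures 9.409 s (still under 10), and rescales once more to 83764.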
/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9999, 10000]), - col_indices=tensor([6848, 9607, 2682, ..., 9449, 6129, 3470]), - values=tensor([0.4694, 0.9529, 0.1463, ..., 0.1268, 0.1399, 0.3765]), +tensor(crow_indices=tensor([ 0, 2, 5, ..., 9997, 9998, 10000]), + col_indices=tensor([5444, 7298, 2758, ..., 5406, 201, 2159]), + values=tensor([0.2785, 0.9301, 0.1173, ..., 0.6105, 0.0625, 0.6073]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5998, 0.7790, 0.8385, ..., 0.1561, 0.5420, 0.2267]) +tensor([0.9117, 0.7600, 0.5676, ..., 0.4107, 0.0296, 0.3559]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.138319730758667 seconds +Time: 0.13988041877746582 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '75911', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.370872497558594} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '75064', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.409368753433228} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 10000, 10000, 10000]), - col_indices=tensor([2414, 5580, 2005, ..., 9768, 442, 1851]), - values=tensor([0.7205, 0.5630, 0.0022, ..., 0.3635, 0.2630, 0.6566]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9999, 9999, 10000]), + col_indices=tensor([ 559, 1691, 3057, ..., 6770, 161, 9445]), + values=tensor([0.2390, 0.7843, 0.4833, ..., 0.8916, 0.1224, 0.1645]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5071, 0.1792, 0.6304, ..., 0.9432, 0.9596, 0.2753]) +tensor([0.9833, 0.3493, 0.9306, ..., 0.5004, 0.5453, 0.7909]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 9.370872497558594 seconds +Time: 9.409368753433228 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '85057', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.436183214187622} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '83764', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.409894704818726} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 10000, 10000, 10000]), - col_indices=tensor([2255, 7580, 9802, ..., 6433, 5292, 8461]), - values=tensor([0.3444, 0.5478, 0.9067, ..., 0.7957, 0.9972, 0.7349]), +tensor(crow_indices=tensor([ 0, 4, 5, ..., 9999, 10000, 10000]), + col_indices=tensor([1791, 2178, 4941, ..., 8437, 8977, 5726]), + values=tensor([0.7542, 0.7473, 0.0826, ..., 0.7863, 0.2178, 0.9123]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.8453, 0.7973, 0.9010, ..., 0.7504, 0.8828, 0.5942]) +tensor([0.0349, 0.7342, 0.7720, ..., 0.6458, 0.8179, 0.1428]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.436183214187622 seconds +Time: 10.409894704818726 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 10000, 10000, 10000]), - col_indices=tensor([2255, 7580, 9802, ..., 6433, 5292, 8461]), - values=tensor([0.3444, 0.5478, 0.9067, ..., 0.7957, 0.9972, 0.7349]), +tensor(crow_indices=tensor([ 0, 4, 5, ..., 9999, 10000, 10000]), + col_indices=tensor([1791, 2178, 4941, ..., 8437, 8977, 5726]), + values=tensor([0.7542, 0.7473, 0.0826, ..., 0.7863, 0.2178, 0.9123]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.8453, 0.7973, 0.9010, ..., 0.7504, 0.8828, 0.5942]) +tensor([0.0349, 0.7342, 0.7720, ..., 0.6458, 0.8179, 0.1428]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.436183214187622 seconds +Time: 10.409894704818726 seconds -[38.19, 37.44, 39.94, 42.89, 39.51, 39.81, 47.03, 47.24, 47.36, 43.53] -[46.56] -14.242342710494995 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 85057, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.436183214187622, 'TIME_S_1KI': 0.12269634732223829, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.123476600647, 'W': 46.56} -[38.19, 37.44, 39.94, 42.89, 39.51, 39.81, 47.03, 47.24, 47.36, 43.53, 45.01, 44.41, 42.1, 40.98, 40.92, 39.18, 41.09, 39.73, 39.61, 39.6] -752.405 -37.62025 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 85057, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.436183214187622, 'TIME_S_1KI': 0.12269634732223829, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 663.123476600647, 'W': 46.56, 'J_1KI': 7.796224609387199, 'W_1KI': 0.5473976274733414, 'W_D': 8.939750000000004, 'J_D': 127.32298324614769, 'W_D_1KI': 0.10510304854391765, 'J_D_1KI': 0.0012356778224475076} +[18.39, 17.85, 18.37, 17.98, 17.9, 18.07, 21.28, 18.76, 18.12, 17.67] +[50.42] +14.11481261253357 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 83764, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.409894704818726, 'TIME_S_1KI': 0.12427647563175977, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 711.6688519239426, 'W': 50.42} +[18.39, 17.85, 18.37, 17.98, 17.9, 18.07, 21.28, 18.76, 18.12, 17.67, 18.33, 17.97, 17.87, 17.66, 17.77, 17.96, 17.86, 17.77, 17.81, 17.94] +327.16499999999996 +16.358249999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 83764, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.409894704818726, 'TIME_S_1KI': 0.12427647563175977, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 711.6688519239426, 'W': 50.42, 'J_1KI': 8.49611828379665, 'W_1KI': 0.6019292297407001, 'W_D': 34.06175, 'J_D': 480.7752185049654, 'W_D_1KI': 0.40663948713050957, 'J_D_1KI': 0.004854585348485144} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json index aaafc55..b3d9f1e 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 34558, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.457140684127808, "TIME_S_1KI": 0.3025968135924477, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 669.6752694511413, "W": 47.19, "J_1KI": 19.37829936486895, "W_1KI": 1.3655304126396204, "W_D": 30.795499999999997, "J_D": 437.020232260704, "W_D_1KI": 0.8911250651079344, "J_D_1KI": 0.02578636104832266} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 33076, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.000443935394287, "TIME_S_1KI": 0.30234744030095195, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 713.14643699646, "W": 51.71, "J_1KI": 21.560842816436693, "W_1KI": 1.5633692103035435, "W_D": 35.256, "J_D": 486.2249232788086, "W_D_1KI": 1.065908816059983, "J_D_1KI": 0.03222604958459254} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output index 0fdae4e..108ea7e 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.3263256549835205} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.31745004653930664} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
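Each .output file closes with four unlabeled lines: a list of idle-power samples in watts, a one-element list holding the average power draw during the measured run, the elapsed wall time of that run in seconds, and the merged result record. The derived JSON fields are consistent with J = W * elapsed (51.18 * 14.12563... = 722.95 in the file above), W_D = W minus the printed idle average, J_D = W_D * elapsed, and the *_1KI fields normalized per 1000 iterations (J_1KI = 722.95 / 7957 * 1000 = 90.86). A hedged sketch of those reductions; note the printed idle average (16.2215 above) is lower than the plain mean of the twenty samples listed, so whatever trimming the harness applies to the idle window is taken as given here rather than reconstructed:

def derive_metrics(w, elapsed_s, idle_avg_w, time_s, iterations):
    # reductions inferred from the logged numbers, not read from spmv.py/batch.py
    j = w * elapsed_s            # energy over the measured run
    w_d = w - idle_avg_w         # dynamic power above idle
    j_d = w_d * elapsed_s
    per_1ki = lambda v: v / iterations * 1000.0
    return {'J': j, 'W_D': w_d, 'J_D': j_d,
            'TIME_S_1KI': per_1ki(time_s), 'J_1KI': per_1ki(j),
            'W_1KI': per_1ki(w), 'W_D_1KI': per_1ki(w_d),
            'J_D_1KI': per_1ki(j_d)}

Two caveats when reading these records: a few idle windows are contaminated by load (the replaced 10000_0.0001 record's 38-47 W samples yield W_D = 8.94, and the new 10000_0.05 record's 43-52 W spike yields W_D = 22.41, both well below the ~35 W typical of the other new records), which depresses the W_D/J_D columns for those runs. And TIME_S_1KI, seconds per 1000 iterations, is overhead-dominated at the lowest densities here, only scaling roughly with nnz once density reaches about 0.01 (1.89 s at density 0.01, 10.66 at 0.05, 26.37 at 0.1).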
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 22, ..., 99981, 99992, +tensor(crow_indices=tensor([ 0, 13, 25, ..., 99974, 99988, 100000]), - col_indices=tensor([ 85, 1274, 1422, ..., 6599, 6784, 7278]), - values=tensor([0.2164, 0.2550, 1.0000, ..., 0.9260, 0.0708, 0.0725]), + col_indices=tensor([ 189, 1046, 1680, ..., 7652, 7822, 9876]), + values=tensor([0.3200, 0.6172, 0.8426, ..., 0.6310, 0.2892, 0.4983]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6027, 0.7133, 0.6416, ..., 0.5356, 0.1307, 0.5576]) +tensor([0.5979, 0.0691, 0.5787, ..., 0.1637, 0.0173, 0.7657]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.3263256549835205 seconds +Time: 0.31745004653930664 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '32176', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.776132822036743} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33076', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.000443935394287} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 20, ..., 99981, 99994, +tensor(crow_indices=tensor([ 0, 5, 16, ..., 99975, 99984, 100000]), - col_indices=tensor([ 544, 706, 2472, ..., 6055, 7261, 9945]), - values=tensor([0.4979, 0.3488, 0.7538, ..., 0.1989, 0.3068, 0.3191]), + col_indices=tensor([2058, 2088, 2648, ..., 8443, 9183, 9230]), + values=tensor([0.6058, 0.3120, 0.6569, ..., 0.6120, 0.0868, 0.9498]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5709, 0.1010, 0.9044, ..., 0.7157, 0.3275, 0.4556]) +tensor([0.4574, 0.0884, 0.9388, ..., 0.4572, 0.8159, 0.8640]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 9.776132822036743 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '34558', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.457140684127808} +Time: 10.000443935394287 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 24, ..., 99980, 99989, +tensor(crow_indices=tensor([ 0, 5, 16, ..., 99975, 99984, 100000]), - col_indices=tensor([ 44, 4326, 6855, ..., 8487, 8731, 9188]), - values=tensor([0.5894, 0.7815, 0.8660, ..., 0.0108, 0.2427, 0.5894]), + col_indices=tensor([2058, 2088, 2648, ..., 8443, 9183, 9230]), + values=tensor([0.6058, 0.3120, 0.6569, ..., 0.6120, 0.0868, 0.9498]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0531, 0.8679, 0.3068, ..., 0.5318, 0.1294, 0.3589]) +tensor([0.4574, 0.0884, 0.9388, ..., 0.4572, 0.8159, 0.8640]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,30 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.457140684127808 seconds +Time: 10.000443935394287 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 24, ..., 99980, 99989, - 100000]), - col_indices=tensor([ 44, 4326, 6855, ..., 8487, 8731, 9188]), - values=tensor([0.5894, 0.7815, 0.8660, ..., 0.0108, 0.2427, 0.5894]), - size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0531, 0.8679, 0.3068, ..., 0.5318, 0.1294, 0.3589]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 100000 -Density: 0.001 -Time: 10.457140684127808 seconds - -[19.5, 18.03, 18.32, 17.98, 18.23, 18.17, 18.6, 17.99, 18.33, 18.09] -[47.19] -14.191041946411133 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34558, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.457140684127808, 'TIME_S_1KI': 0.3025968135924477, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 669.6752694511413, 'W': 47.19} -[19.5, 18.03, 18.32, 17.98, 18.23, 18.17, 18.6, 17.99, 18.33, 18.09, 18.43, 17.95, 18.1, 18.53, 18.09, 18.56, 18.03, 17.91, 18.04, 18.04] -327.89 -16.3945 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34558, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.457140684127808, 'TIME_S_1KI': 0.3025968135924477, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 669.6752694511413, 'W': 47.19, 'J_1KI': 19.37829936486895, 'W_1KI': 1.3655304126396204, 'W_D': 30.795499999999997, 'J_D': 437.020232260704, 'W_D_1KI': 0.8911250651079344, 'J_D_1KI': 0.02578636104832266} +[18.4, 17.89, 17.86, 18.15, 21.93, 17.6, 17.74, 17.84, 17.81, 17.8] +[51.71] +13.791267395019531 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 33076, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.000443935394287, 'TIME_S_1KI': 0.30234744030095195, 'BASELINE_TIME_S': 10, 
'BASELINE_DELAY_S': 10, 'J': 713.14643699646, 'W': 51.71} +[18.4, 17.89, 17.86, 18.15, 21.93, 17.6, 17.74, 17.84, 17.81, 17.8, 22.42, 17.97, 17.98, 17.67, 18.14, 18.06, 18.09, 18.36, 17.77, 17.82] +329.08000000000004 +16.454 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 33076, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.000443935394287, 'TIME_S_1KI': 0.30234744030095195, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 713.14643699646, 'W': 51.71, 'J_1KI': 21.560842816436693, 'W_1KI': 1.5633692103035435, 'W_D': 35.256, 'J_D': 486.2249232788086, 'W_D_1KI': 1.065908816059983, 'J_D_1KI': 0.03222604958459254} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json index b18c025..ec86a79 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 5537, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.417654037475586, "TIME_S_1KI": 1.8814618091882944, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 687.7580058908463, "W": 47.74000000000001, "J_1KI": 124.21130682514833, "W_1KI": 8.621997471554996, "W_D": 31.48425000000001, "J_D": 453.5723711137177, "W_D_1KI": 5.686156763590393, "J_D_1KI": 1.0269381910042248} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 5536, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.43858790397644, "TIME_S_1KI": 1.885583075140253, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 757.4033546972274, "W": 52.18, "J_1KI": 136.81418979357431, "W_1KI": 9.425578034682081, "W_D": 35.855000000000004, "J_D": 520.4426462757588, "W_D_1KI": 6.476697976878613, "J_D_1KI": 1.1699237674997496} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output index 42d7009..cdf759d 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.8961181640625} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.8966615200042725} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 96, 195, ..., 999806, - 999906, 1000000]), - col_indices=tensor([ 19, 113, 151, ..., 9681, 9759, 9836]), - values=tensor([0.1144, 0.7732, 0.9749, ..., 0.1321, 0.3947, 0.2714]), +tensor(crow_indices=tensor([ 0, 98, 196, ..., 999798, + 999896, 1000000]), + col_indices=tensor([ 136, 346, 355, ..., 9896, 9907, 9979]), + values=tensor([0.5884, 0.9037, 0.2601, ..., 0.4944, 0.5993, 0.9598]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1354, 0.8257, 0.6569, ..., 0.0257, 0.7874, 0.8457]) +tensor([0.5307, 0.6978, 0.6134, ..., 0.5179, 0.0970, 0.9420]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 1.8961181640625 seconds +Time: 1.8966615200042725 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5537', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.417654037475586} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5536', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.43858790397644} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 103, 198, ..., 999795, - 999893, 1000000]), - col_indices=tensor([ 194, 313, 451, ..., 9690, 9776, 9879]), - values=tensor([0.2779, 0.8250, 0.2083, ..., 0.7384, 0.0572, 0.6638]), +tensor(crow_indices=tensor([ 0, 90, 180, ..., 999793, + 999892, 1000000]), + col_indices=tensor([ 10, 80, 127, ..., 9954, 9956, 9988]), + values=tensor([0.2975, 0.8577, 0.6251, ..., 0.1783, 0.1753, 0.5886]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.5010, 0.3969, 0.7780, ..., 0.5969, 0.2345, 0.7915]) +tensor([0.4757, 0.0822, 0.0813, ..., 0.4411, 0.1352, 0.6104]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.417654037475586 seconds +Time: 10.43858790397644 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 103, 198, ..., 999795, - 999893, 1000000]), - col_indices=tensor([ 194, 313, 451, ..., 9690, 9776, 9879]), - values=tensor([0.2779, 0.8250, 0.2083, ..., 0.7384, 0.0572, 0.6638]), +tensor(crow_indices=tensor([ 0, 90, 180, ..., 999793, + 999892, 1000000]), + col_indices=tensor([ 10, 80, 127, ..., 9954, 9956, 9988]), + values=tensor([0.2975, 0.8577, 0.6251, ..., 0.1783, 0.1753, 0.5886]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.5010, 0.3969, 0.7780, ..., 0.5969, 0.2345, 0.7915]) +tensor([0.4757, 0.0822, 0.0813, ..., 0.4411, 0.1352, 0.6104]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.417654037475586 seconds +Time: 10.43858790397644 seconds -[18.42, 18.05, 18.03, 18.24, 18.1, 18.01, 17.83, 17.89, 18.16, 18.06] -[47.74] -14.406326055526733 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5537, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.417654037475586, 'TIME_S_1KI': 1.8814618091882944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 687.7580058908463, 'W': 47.74000000000001} -[18.42, 18.05, 18.03, 18.24, 18.1, 18.01, 17.83, 17.89, 18.16, 18.06, 18.38, 17.81, 18.07, 18.25, 18.24, 17.94, 18.02, 18.06, 18.02, 17.93] -325.115 -16.25575 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5537, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.417654037475586, 'TIME_S_1KI': 1.8814618091882944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 687.7580058908463, 'W': 47.74000000000001, 'J_1KI': 124.21130682514833, 'W_1KI': 8.621997471554996, 'W_D': 31.48425000000001, 'J_D': 453.5723711137177, 'W_D_1KI': 5.686156763590393, 'J_D_1KI': 1.0269381910042248} +[18.3, 18.15, 17.72, 17.62, 17.65, 18.51, 19.15, 17.59, 17.78, 18.09] +[52.18] +14.515204191207886 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5536, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.43858790397644, 'TIME_S_1KI': 1.885583075140253, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 757.4033546972274, 'W': 52.18} +[18.3, 18.15, 17.72, 17.62, 17.65, 18.51, 19.15, 17.59, 17.78, 18.09, 18.11, 18.06, 18.06, 18.01, 19.44, 18.82, 18.14, 17.87, 17.86, 17.64] +326.5 +16.325 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5536, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.43858790397644, 'TIME_S_1KI': 1.885583075140253, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 757.4033546972274, 'W': 52.18, 'J_1KI': 136.81418979357431, 'W_1KI': 9.425578034682081, 'W_D': 35.855000000000004, 'J_D': 520.4426462757588, 'W_D_1KI': 6.476697976878613, 'J_D_1KI': 1.1699237674997496} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json index 8a616ae..4abbf3f 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.884310483932495, "TIME_S_1KI": 10.884310483932495, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 825.0359781861306, "W": 47.81, "J_1KI": 825.0359781861306, "W_1KI": 47.81, "W_D": 31.353, "J_D": 541.0448237621785, "W_D_1KI": 31.352999999999998, "J_D_1KI": 31.352999999999998} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.659594058990479, "TIME_S_1KI": 10.659594058990479, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 944.4377518177032, "W": 52.04, "J_1KI": 944.4377518177032, "W_1KI": 52.04, "W_D": 22.411249999999995, "J_D": 406.72618304044, "W_D_1KI": 22.411249999999995, "J_D_1KI": 22.411249999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output index aad2263..e447139 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.884310483932495} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.659594058990479} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 518, 1026, ..., 4999007, - 4999483, 5000000]), - col_indices=tensor([ 3, 39, 78, ..., 9968, 9975, 9994]), - values=tensor([0.2142, 0.4373, 0.1249, ..., 0.9529, 0.9095, 0.5518]), +tensor(crow_indices=tensor([ 0, 491, 987, ..., 4999032, + 4999549, 5000000]), + col_indices=tensor([ 2, 62, 63, ..., 9943, 9957, 9997]), + values=tensor([0.7700, 0.3306, 0.4646, ..., 0.1296, 0.2152, 0.2390]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.2404, 0.3133, 0.0015, ..., 0.7254, 0.6117, 0.4995]) +tensor([0.3463, 0.4470, 0.4445, ..., 0.6886, 0.1263, 0.4488]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.884310483932495 seconds +Time: 10.659594058990479 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 518, 1026, ..., 4999007, - 4999483, 5000000]), - col_indices=tensor([ 3, 39, 78, ..., 9968, 9975, 9994]), - values=tensor([0.2142, 0.4373, 0.1249, ..., 0.9529, 0.9095, 0.5518]), +tensor(crow_indices=tensor([ 0, 491, 987, ..., 4999032, + 4999549, 5000000]), + col_indices=tensor([ 2, 62, 63, ..., 9943, 9957, 9997]), + values=tensor([0.7700, 0.3306, 0.4646, ..., 0.1296, 0.2152, 0.2390]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.2404, 0.3133, 0.0015, ..., 0.7254, 0.6117, 0.4995]) +tensor([0.3463, 0.4470, 0.4445, ..., 0.6886, 0.1263, 0.4488]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.884310483932495 seconds +Time: 10.659594058990479 seconds -[18.07, 17.88, 18.1, 21.37, 18.24, 18.16, 18.18, 18.01, 18.03, 17.87] -[47.81] -17.256556749343872 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.884310483932495, 'TIME_S_1KI': 10.884310483932495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 825.0359781861306, 'W': 47.81} -[18.07, 17.88, 18.1, 21.37, 18.24, 18.16, 18.18, 18.01, 18.03, 17.87, 18.49, 18.1, 18.43, 18.06, 18.42, 17.93, 18.03, 17.8, 18.13, 18.11] -329.14 -16.457 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.884310483932495, 'TIME_S_1KI': 10.884310483932495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 825.0359781861306, 'W': 47.81, 'J_1KI': 825.0359781861306, 'W_1KI': 47.81, 'W_D': 31.353, 'J_D': 541.0448237621785, 'W_D_1KI': 31.352999999999998, 'J_D_1KI': 31.352999999999998} +[18.53, 17.91, 17.8, 17.55, 17.82, 18.2, 17.7, 17.96, 22.04, 18.01] +[52.04] +18.148304224014282 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 
0.05, 'TIME_S': 10.659594058990479, 'TIME_S_1KI': 10.659594058990479, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 944.4377518177032, 'W': 52.04} +[18.53, 17.91, 17.8, 17.55, 17.82, 18.2, 17.7, 17.96, 22.04, 18.01, 43.17, 47.09, 51.95, 51.61, 51.61, 46.81, 50.13, 50.7, 46.45, 18.78] +592.575 +29.628750000000004 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.659594058990479, 'TIME_S_1KI': 10.659594058990479, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 944.4377518177032, 'W': 52.04, 'J_1KI': 944.4377518177032, 'W_1KI': 52.04, 'W_D': 22.411249999999995, 'J_D': 406.72618304044, 'W_D_1KI': 22.411249999999995, 'J_D_1KI': 22.411249999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..3316ce4 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 26.374675512313843, "TIME_S_1KI": 26.374675512313843, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1870.3769276547432, "W": 52.87, "J_1KI": 1870.3769276547432, "W_1KI": 52.87, "W_D": 36.2005, "J_D": 1280.661622272849, "W_D_1KI": 36.2005, "J_D_1KI": 36.2005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..fb57672 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 26.374675512313843} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1071, 2053, ..., 9998024, + 9999000, 10000000]), + col_indices=tensor([ 3, 4, 5, ..., 9980, 9985, 9995]), + values=tensor([0.3665, 0.1961, 0.0802, ..., 0.1951, 0.2808, 0.5332]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6172, 0.4719, 0.5685, ..., 0.7751, 0.3390, 0.5446]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 26.374675512313843 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1071, 2053, ..., 9998024, + 9999000, 10000000]), + col_indices=tensor([ 3, 4, 5, ..., 9980, 9985, 9995]), + values=tensor([0.3665, 0.1961, 0.0802, ..., 0.1951, 0.2808, 0.5332]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6172, 0.4719, 0.5685, ..., 0.7751, 0.3390, 0.5446]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 26.374675512313843 seconds + +[18.71, 18.02, 18.12, 18.01, 22.7, 19.43, 18.17, 18.48, 18.54, 18.36] +[52.87] +35.376904249191284 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 26.374675512313843, 'TIME_S_1KI': 26.374675512313843, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1870.3769276547432, 'W': 52.87} +[18.71, 18.02, 18.12, 18.01, 22.7, 19.43, 18.17, 18.48, 18.54, 18.36, 18.41, 17.86, 17.86, 18.01, 17.87, 17.66, 17.87, 17.6, 18.07, 22.76] +333.39 +16.6695 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 26.374675512313843, 'TIME_S_1KI': 26.374675512313843, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1870.3769276547432, 'W': 52.87, 'J_1KI': 1870.3769276547432, 'W_1KI': 52.87, 'W_D': 36.2005, 'J_D': 1280.661622272849, 'W_D_1KI': 36.2005, 'J_D_1KI': 36.2005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json index a0720a2..bb313c5 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 225343, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.386851072311401, "TIME_S_1KI": 0.04609351553991649, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 661.4945677185059, "W": 46.24, "J_1KI": 2.935500848566434, "W_1KI": 0.20519829770616352, "W_D": 30.072000000000003, "J_D": 430.2003598709107, "W_D_1KI": 0.1334498963801849, "J_D_1KI": 0.0005922078625925141} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 225815, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.501307487487793, "TIME_S_1KI": 0.046504029792032386, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 725.5277880001069, "W": 50.86, "J_1KI": 3.2129300002218932, "W_1KI": 0.22522861634523836, "W_D": 34.5345, "J_D": 492.64135656094555, "W_D_1KI": 0.15293271040453468, "J_D_1KI": 0.0006772477931250567} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output index 6742334..9f5c396 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,1131 +1,373 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.06630802154541016} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([6116, 9123, 2230, 4007, 2708, 6506, 8700, 3316, 2761, - 1361, 1958, 5168, 9287, 8535, 3319, 5344, 902, 1975, - 488, 7509, 3585, 6731, 5003, 3621, 9227, 602, 6973, - 8702, 9039, 2485, 4067, 2477, 9061, 2388, 1777, 7081, - 5954, 215, 9598, 6942, 5591, 9010, 7196, 7714, 5337, - 1993, 6247, 2446, 6356, 9820, 7249, 3274, 1887, 2531, - 135, 4906, 4233, 322, 743, 3780, 3850, 995, 6910, - 9173, 1426, 5060, 4803, 1325, 8341, 4031, 7649, 3889, - 2513, 8971, 7759, 3358, 2558, 8091, 7627, 5455, 9323, - 4647, 1893, 5017, 4607, 6431, 7258, 1502, 6846, 4712, - 6760, 477, 7596, 524, 2899, 8608, 9797, 2612, 3584, - 7461, 3936, 7937, 8808, 4443, 6151, 2930, 8931, 2432, - 2320, 4314, 7498, 4175, 5649, 4525, 3428, 2414, 2246, - 8311, 112, 444, 1035, 4057, 4976, 2482, 1046, 9577, - 2837, 2113, 6259, 917, 5852, 6925, 5508, 4578, 5723, - 9556, 8979, 7160, 5267, 3922, 1922, 2596, 9081, 757, - 772, 3039, 9566, 4359, 6540, 8900, 662, 5830, 4448, - 605, 4778, 5765, 2865, 7567, 2462, 7522, 2141, 8074, - 6915, 2553, 6285, 1865, 4856, 9508, 5786, 2622, 772, - 7170, 5197, 3410, 8239, 635, 8957, 2007, 378, 8804, - 324, 4438, 5879, 6394, 7346, 8499, 1491, 6973, 9493, - 6934, 7564, 4608, 611, 1904, 5276, 8435, 5763, 6936, - 7524, 3677, 8083, 5405, 3909, 5138, 3565, 6302, 8355, - 9878, 6658, 8098, 5492, 4667, 3600, 435, 3485, 5110, - 5400, 7331, 3046, 771, 8076, 8253, 925, 6498, 1888, - 6020, 629, 8119, 4089, 6057, 4670, 6181, 4064, 7866, - 5463, 9176, 7650, 1100, 663, 4965, 6767, 7260, 4483, - 8084, 1545, 1791, 5532, 805, 3597, 2559, 1895, 2659, - 9098, 2448, 1014, 4415, 7809, 1273, 2238, 2647, 3696, - 5133, 3262, 8595, 4825, 4418, 8681, 3451, 6551, 4396, - 9283, 5009, 5175, 4601, 5413, 1313, 4805, 9367, 8911, - 7493, 3270, 4398, 5992, 9663, 6315, 5793, 6224, 9291, - 3783, 8917, 9634, 3445, 7019, 2536, 2368, 8219, 3595, - 23, 3502, 2962, 8019, 7473, 393, 190, 1589, 354, - 421, 8045, 1755, 5639, 7761, 5386, 5069, 5542, 8965, - 5927, 3847, 2964, 869, 4371, 2320, 9236, 6638, 1008, - 6453, 2815, 2880, 9144, 8967, 2748, 3389, 389, 3962, - 9143, 4322, 4180, 6736, 4718, 241, 2062, 33, 7546, - 1341, 3003, 357, 5780, 5018, 1298, 8692, 264, 3354, - 5052, 1461, 3543, 2731, 5615, 3803, 4521, 4194, 1495, - 5020, 5937, 7198, 48, 9071, 2680, 527, 4924, 603, - 8901, 7030, 3950, 9444, 1090, 2958, 8064, 9214, 1497, - 6814, 7285, 
2474, 3729, 4898, 1679, 9556, 9438, 6495, - 465, 1893, 294, 3214, 8299, 5873, 2230, 5817, 7990, - 2168, 9309, 7987, 8274, 5938, 435, 4649, 3960, 4215, - 1498, 9365, 332, 6793, 4740, 6775, 9445, 2955, 1861, - 5114, 9359, 6453, 1653, 2620, 1677, 9057, 7245, 3148, - 9808, 3603, 7182, 9616, 2668, 6950, 3580, 2228, 9825, - 1975, 8036, 4804, 5680, 4088, 61, 9590, 1512, 881, - 4266, 8720, 4260, 9052, 7548, 3975, 1985, 5354, 9292, - 6028, 4459, 8614, 9302, 7355, 5136, 4232, 794, 3208, - 9008, 5430, 4587, 2688, 536, 5794, 319, 4309, 7870, - 7743, 5154, 9925, 9472, 381, 2331, 5810, 8907, 8351, - 204, 845, 4770, 6471, 6978, 2770, 3097, 912, 1195, - 3427, 9600, 6282, 5328, 1541, 3058, 8533, 2647, 4897, - 3771, 4338, 1308, 4810, 7849, 4548, 3988, 5788, 6866, - 2785, 971, 9156, 7115, 9269, 8400, 811, 7446, 1919, - 7380, 6442, 4826, 5591, 9322, 9800, 5043, 2093, 7573, - 5766, 8810, 551, 6920, 3350, 1995, 899, 7606, 7900, - 5362, 3168, 6232, 3279, 1780, 4131, 7640, 2283, 9115, - 9698, 675, 5864, 4274, 7254, 4409, 1918, 8317, 35, - 3785, 7903, 7315, 8852, 6747, 807, 8576, 8906, 691, - 708, 6138, 6393, 2318, 2878, 2137, 7541, 3877, 1155, - 3556, 2641, 6169, 302, 8956, 5326, 6536, 5200, 412, - 6163, 7006, 3525, 2868, 5384, 6923, 3304, 6397, 2096, - 5354, 9686, 8274, 6558, 6562, 390, 1816, 3737, 906, - 4664, 2719, 5710, 310, 8612, 9508, 9122, 9007, 9401, - 1823, 9881, 6071, 796, 9171, 8620, 4054, 9568, 7418, - 1371, 7178, 7465, 5873, 8086, 1945, 2932, 4795, 4874, - 4361, 1566, 1859, 6801, 889, 1530, 8341, 526, 5690, - 993, 7020, 4621, 254, 6955, 8349, 4162, 2379, 7334, - 5526, 2880, 6973, 3255, 9449, 6690, 9887, 2367, 9592, - 2579, 4989, 210, 1033, 646, 1002, 8725, 9305, 5186, - 4944, 2755, 4073, 8430, 314, 1754, 580, 3005, 8962, - 1021, 7176, 9239, 8170, 6447, 8806, 6643, 9503, 507, - 8012, 9409, 4496, 8897, 6044, 6733, 6611, 1828, 9900, - 3040, 1872, 1549, 3695, 7165, 2451, 6269, 9445, 8195, - 2903, 9122, 4531, 2056, 8807, 3151, 2547, 1657, 2273, - 1590, 4281, 6979, 1159, 3231, 8829, 5167, 4499, 5400, - 7648, 5702, 2986, 5866, 7356, 1464, 7970, 6150, 9827, - 2152, 1626, 1304, 9381, 4432, 1369, 7126, 1255, 5108, - 4506, 234, 4239, 132, 3138, 9487, 6305, 1172, 9024, - 4948, 2728, 1129, 7172, 1126, 5029, 2929, 1979, 8307, - 8506, 9558, 5890, 4859, 8500, 2366, 9475, 5070, 9045, - 2751, 233, 5241, 3389, 3674, 4678, 5172, 9659, 400, - 1980, 5834, 8329, 7689, 2697, 3046, 7936, 6546, 7332, - 6777, 5367, 9296, 5637, 7409, 5271, 5843, 1628, 38, - 1744, 5001, 1131, 801, 8117, 3676, 785, 4693, 5487, - 6285, 1190, 3167, 8654, 8822, 2339, 47, 2664, 6031, - 8050, 6869, 9573, 6678, 1322, 9064, 6480, 3937, 6511, - 6143, 8334, 9131, 2370, 8996, 1669, 5226, 9766, 1824, - 3992, 5018, 23, 8217, 1028, 6132, 9099, 304, 1850, - 6441, 8232, 6512, 4372, 1929, 5266, 8857, 264, 9346, - 4432, 5493, 121, 8839, 2095, 7843, 7190, 4607, 4890, - 3219, 1710, 9271, 3380, 6743, 9430, 1641, 6592, 2928, - 5995, 4066, 1771, 1180, 600, 6746, 9407, 4354, 8033, - 9794, 1271, 2958, 5326, 2059, 2853, 5833, 5370, 4952, - 8664, 8669, 8180, 9495, 6108, 666, 6438, 881, 440, - 5024, 9721, 7337, 2980, 132, 1124, 6697, 3531, 1354, - 981, 689, 6023, 2054, 5122, 7310, 3624, 6808, 8340, - 8753, 3317, 9169, 9486, 7113, 3170, 2177, 6450, 1559, - 6842, 9080, 3467, 5262, 2559, 9529, 7907, 6164, 3026, - 7921, 3217, 9438, 3022, 6316, 342, 4475, 8175, 7752, - 9356, 7486, 8041, 991, 9993, 4182, 6884, 6225, 1247, - 9300, 5234, 1257, 2288, 5243, 6258, 6794, 4713, 2122, - 8191, 769, 8990, 1254, 2344, 141, 5772, 6214, 9001, - 3153, 6464, 2996, 5805, 9634, 1592, 5527, 2658, 
-       [1000 random col_indices and values elided]
-       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.5197, 0.1343, 0.9407,  ..., 0.1023, 0.5237, 0.0220])
-Matrix Type: synthetic
-Matrix Format: csr
-Shape: torch.Size([10000, 10000])
-Rows: 10000
-Size: 100000000
-NNZ: 1000
-Density: 1e-05
-Time: 0.06630802154541016 seconds
-
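For reference, each run recorded above builds a synthetic 10000x10000 CSR matrix with 1000 nonzeros (density 1e-05), prints the matrix and a random dense operand, and reports TIME_S for what appears to be an iteration-count's worth of sparse matrix-vector products (the third positional argument to spmv.py). The sketch below is a minimal reconstruction of that measurement under stated assumptions; it is not the repository's spmv.py, and the nonzero placement and timing protocol are illustrative only.

import json
import time

import torch

# Parameters taken from the logs above.
size = 10000          # -ss 10000
density = 1e-05       # -sd 1e-05
iterations = 1000     # varies per run in the logs (e.g. 1000, 158351)

nnz = int(size * size * density)  # 1000 nonzeros, as logged

# Assumption: nonzeros are placed uniformly at random. Duplicate positions
# are merged by coalesce(), so the realized nnz could be slightly lower.
indices = torch.randint(0, size, (2, nnz))
values = torch.rand(nnz)
matrix = torch.sparse_coo_tensor(indices, values, (size, size)).coalesce()
# The warning lines in the logs show spmv.py converting exactly like this:
matrix = matrix.to_sparse_csr().type(torch.float32)

vector = torch.rand(size)  # the dense operand printed after the matrix

start = time.time()
for _ in range(iterations):
    matrix @ vector  # one sparse matrix-vector product per iteration
elapsed = time.time() - start

print(json.dumps({"MATRIX_SHAPE": [size, size], "MATRIX_NNZ": nnz,
                  "MATRIX_DENSITY": density, "TIME_S": elapsed}))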
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '158351', '-ss', '10000', '-sd', '1e-05', '-c', '1']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.378459692001343}
-
-/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
- matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       [1000 random col_indices and values elided]
-       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.4203, 0.6136, 0.5758,  ..., 0.4091, 0.3563, 0.6125])
-Matrix Type: synthetic
-Matrix Format: csr
-Shape: torch.Size([10000, 10000])
-Rows: 10000
-Size: 100000000
-NNZ: 1000
-Density: 1e-05
-Time: 7.378459692001343 seconds
-
-['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '225343', '-ss', '10000', '-sd', '1e-05', '-c', '1']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.386851072311401}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.06352877616882324}

/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([5090, 5520, 6854, 5240, 6540, 5414, 9539, 8466, 479, - 3119, 5272, 7854, 2035, 3254, 301, 9387, 5412, 9403, - 3029, 6749, 4880, 1485, 7157, 5629, 6151, 4478, 7699, - 7698, 6954, 7468, 2219, 5639, 6353, 506, 8308, 5821, - 2487, 7627, 3842, 7369, 6744, 2867, 7111, 7921, 1986, - 380, 9961, 4202, 9024, 661, 5897, 7449, 9845, 9461, - 9917, 234, 7756, 4104, 195, 2757, 4588, 1755, 600, - 3208, 9769, 495, 8241, 6634, 6218, 247, 649, 2255, - 6934, 5056, 3570, 5404, 4033, 4528, 6168, 3330, 5154, - 6668, 8969, 4990, 5914, 7294, 7798, 8937, 1984, 811, - 8267, 6000, 8441, 2901, 6504, 2951, 6191, 5592, 9657, - 5206, 4311, 4344, 6838, 4035, 8212, 9827, 8714, 8242, - 2597, 1268, 6941, 152, 4041, 7546, 5546, 9553, 8677, - 3838, 1475, 6605, 2849, 8979, 1585, 9524, 5732, 668, - 8770, 2014, 4555, 5432, 5784, 211, 829, 5623, 601, - 2334, 1210, 8619, 2536, 8949, 1829, 5741, 5371, 2300, - 3074, 3033, 4257, 6930, 5374, 2094, 8363, 8250, 6651, - 7561, 1388, 1839, 6107, 6362, 2380, 7307, 2838, 7221, - 3519, 6205, 3890, 3976, 4925, 3385, 1072, 9414, 2059, - 8855, 6070, 1702, 9893, 388, 4416, 3865, 1170, 3494, - 5538, 3081, 189, 2709, 2842, 6301, 7706, 8161, 9291, - 2769, 7658, 3415, 1110, 8398, 478, 2076, 5495, 5446, - 8782, 6300, 9602, 7484, 2093, 9250, 2777, 8249, 4928, - 2881, 9270, 1050, 2161, 2931, 2310, 1049, 8115, 3593, - 1809, 941, 3744, 1194, 3342, 9878, 6173, 5475, 9309, - 4801, 9295, 4078, 622, 4058, 6546, 2174, 1250, 3334, - 5694, 8789, 7302, 5052, 3464, 3176, 6460, 6096, 3332, - 213, 105, 7605, 1853, 506, 1606, 6777, 7316, 5398, - 8931, 5969, 4321, 5680, 7873, 95, 6160, 4933, 6865, - 5591, 7447, 4679, 921, 3881, 2291, 4538, 5253, 6704, - 1367, 7057, 4866, 205, 2657, 1962, 5628, 663, 5095, - 1120, 7753, 3664, 5253, 1416, 3161, 6425, 5128, 3164, - 6290, 5671, 1125, 3995, 1998, 5960, 1764, 146, 2947, - 5808, 9352, 6461, 4335, 459, 5492, 7868, 7568, 1136, - 6162, 4143, 8794, 7939, 2386, 5360, 1123, 3979, 4759, - 6589, 8868, 7581, 8042, 6445, 8358, 2368, 5279, 5299, - 9066, 2927, 6449, 5126, 9410, 2256, 9577, 6938, 3964, - 1164, 9207, 482, 8277, 7320, 5132, 4346, 7324, 8992, - 5310, 2655, 3277, 5668, 2397, 2897, 550, 727, 2417, - 433, 4420, 7753, 7792, 4331, 9413, 1735, 9320, 8721, - 6954, 4204, 4649, 5618, 2016, 8183, 5689, 9063, 3738, - 2959, 3961, 4009, 3606, 4760, 1230, 2217, 5835, 899, - 5586, 9110, 8053, 9627, 2842, 2192, 3166, 4583, 8158, - 4940, 1032, 4696, 2026, 6388, 2745, 2712, 7092, 1068, - 6904, 4938, 9153, 2849, 5497, 9818, 7411, 8049, 8663, - 7400, 8022, 8738, 7227, 1194, 3739, 8622, 8207, 2749, - 9175, 8238, 9065, 9556, 3203, 9575, 8134, 2289, 9054, - 2794, 6202, 2265, 6391, 98, 2630, 7976, 8036, 3067, - 1792, 5338, 363, 1223, 6083, 1229, 6539, 7336, 9634, - 7650, 4192, 95, 4556, 6967, 3917, 3894, 9298, 4210, - 1864, 9331, 8210, 6617, 3743, 6287, 2538, 238, 3971, - 4196, 5629, 9006, 3099, 383, 6739, 7853, 1507, 4211, - 5982, 5409, 9920, 7890, 591, 2525, 3725, 6493, 3034, - 3234, 6824, 9969, 7746, 6555, 420, 331, 3665, 6915, - 9255, 6959, 1855, 8868, 53, 4402, 8159, 4409, 437, - 335, 9024, 8990, 8821, 3866, 9075, 5859, 1760, 4060, - 6735, 9065, 4222, 4171, 2823, 8557, 3508, 8301, 8364, - 1888, 6711, 7704, 7616, 700, 6570, 9066, 4571, 8351, - 9210, 7549, 1287, 2369, 7405, 5372, 3041, 8555, 2099, - 8354, 1590, 5248, 7669, 3628, 5312, 8811, 9656, 91, - 6749, 2151, 1335, 1000, 3671, 9405, 6954, 7743, 8173, - 1053, 2058, 4192, 5319, 304, 2356, 1950, 9207, 92, - 
6497, 2983, 9159, 7556, 3250, 1554, 5610, 7836, 3795, - 8012, 9512, 688, 5982, 7033, 541, 5439, 6622, 5227, - 1460, 8716, 7845, 2503, 4777, 2879, 4671, 1973, 3787, - 7162, 397, 4461, 6623, 4298, 2561, 3316, 7915, 5983, - 1463, 8647, 7375, 9336, 4967, 8147, 7114, 4177, 6572, - 589, 3945, 1105, 9035, 144, 8793, 5666, 4624, 5196, - 7941, 6950, 4479, 4119, 2123, 3611, 697, 3474, 3332, - 321, 4913, 8438, 6810, 8281, 7730, 135, 2273, 9671, - 9509, 3960, 9361, 3386, 8984, 5288, 6392, 4006, 7514, - 9570, 2995, 2185, 1986, 5814, 2642, 5245, 7837, 8338, - 9556, 4510, 7836, 9711, 5626, 4884, 1758, 7786, 8389, - 1547, 8704, 9162, 1234, 5544, 8023, 7139, 2852, 716, - 3911, 326, 7817, 8217, 3818, 4420, 8158, 4153, 4601, - 7754, 6866, 7422, 4481, 6488, 984, 8828, 1955, 379, - 1624, 1303, 6290, 8219, 6816, 2475, 9043, 4031, 8584, - 1795, 4011, 965, 6538, 341, 7259, 7483, 3110, 6233, - 5102, 1097, 4896, 3220, 5529, 7988, 1701, 3588, 10, - 2698, 9881, 1569, 6687, 453, 2430, 6957, 711, 9279, - 1596, 9071, 3667, 407, 7168, 5764, 9571, 5484, 6683, - 4948, 5349, 937, 8097, 6338, 3961, 9400, 3604, 7579, - 7706, 3756, 8824, 1022, 3037, 3053, 8543, 5584, 7875, - 2400, 4692, 543, 8436, 3410, 3578, 186, 2825, 4894, - 4326, 5710, 1051, 4806, 1754, 5220, 2270, 1501, 4413, - 674, 5529, 5007, 6644, 1894, 8344, 1904, 5600, 8997, - 7614, 6042, 5049, 7997, 4898, 5487, 4108, 3312, 9233, - 8389, 608, 4442, 5243, 5032, 8614, 5739, 5802, 4120, - 8899, 99, 2193, 8861, 5225, 9792, 1275, 9868, 5618, - 2605, 6218, 8444, 2643, 1279, 9216, 6571, 5281, 8680, - 2287, 8748, 8280, 4767, 9473, 1334, 6986, 3525, 3169, - 3974, 363, 6562, 574, 5621, 1586, 9124, 9590, 3765, - 1685, 734, 386, 1332, 1723, 1290, 9672, 3553, 9301, - 1364, 307, 2456, 1290, 4339, 5612, 9566, 1808, 3981, - 2608, 1116, 9731, 3516, 6888, 8925, 556, 5146, 1699, - 9152, 2113, 5887, 9273, 781, 5425, 3828, 8470, 84, - 4795, 3201, 5889, 8067, 9356, 9322, 9600, 2467, 238, - 3969, 4521, 2069, 7924, 1028, 6107, 7144, 727, 9093, - 1824, 3094, 3039, 5739, 8995, 1898, 8954, 2989, 1413, - 3277, 5865, 6456, 1730, 4656, 2233, 195, 5096, 2376, - 7691, 1029, 8697, 3625, 5246, 3695, 431, 2145, 9293, - 6188, 3268, 7555, 1926, 4810, 5639, 5945, 3037, 1338, - 1725, 4126, 2764, 3990, 1925, 4031, 1867, 4112, 2508, - 4960, 1189, 1309, 433, 7697, 1040, 4522, 2213, 5963, - 5735, 5373, 1987, 8913, 9575, 2432, 6960, 9607, 6164, - 404, 2986, 2433, 3250, 7219, 1524, 6099, 456, 8056, - 975, 4344, 3991, 3094, 9425, 8383, 1533, 6432, 3860, - 1956, 7982, 5506, 5690, 5861, 7053, 2115, 4411, 4091, - 6524, 4624, 2372, 7084, 4769, 3223, 9544, 2559, 9666, - 5145, 4409, 2858, 6595, 9077, 3107, 5138, 7219, 5582, - 386, 8287, 8166, 6451, 1770, 6684, 1635, 9826, 113, - 951]), - values=tensor([8.6107e-01, 6.9271e-01, 4.8368e-01, 9.8101e-01, - 9.6646e-01, 7.4405e-01, 9.3604e-01, 4.7222e-01, - 3.6113e-01, 7.3932e-02, 7.2732e-01, 5.3118e-02, - 7.3120e-01, 9.0814e-01, 8.9645e-01, 6.7127e-01, - 7.8345e-01, 4.4017e-02, 8.0039e-01, 4.4358e-01, - 2.7672e-01, 1.8812e-01, 1.5166e-01, 3.1698e-01, - 4.1803e-01, 2.4922e-01, 3.5443e-01, 5.8008e-01, - 5.2338e-01, 1.6648e-01, 7.3597e-01, 8.1915e-01, - 1.9507e-01, 5.2186e-01, 6.8481e-01, 3.0218e-01, - 3.0589e-01, 6.4079e-01, 5.7065e-01, 5.6508e-02, - 2.2168e-01, 9.3404e-01, 5.9158e-01, 2.2703e-01, - 6.3212e-01, 1.4065e-01, 3.4281e-01, 9.7137e-01, - 4.7761e-02, 5.1358e-01, 7.9179e-01, 2.7935e-01, - 1.5324e-01, 5.4348e-01, 2.7165e-01, 9.6685e-01, - 1.0560e-01, 3.3051e-01, 5.8331e-02, 1.6726e-01, - 6.8070e-01, 9.7579e-01, 1.7781e-01, 1.5625e-01, - 5.7095e-01, 2.6864e-01, 
2.1547e-01, 5.5164e-01, - 8.3829e-01, 5.1841e-01, 7.4553e-01, 5.9540e-01, - 5.4239e-01, 3.0666e-01, 8.0495e-01, 4.9085e-01, - 5.0594e-01, 3.5705e-01, 8.9499e-01, 1.7606e-01, - 4.2988e-01, 7.8070e-01, 7.1563e-01, 8.7994e-01, - 8.0990e-01, 6.9514e-02, 9.3172e-01, 6.6499e-01, - 8.3186e-01, 3.9669e-01, 6.3901e-01, 3.7501e-01, - 2.7733e-01, 7.5182e-01, 9.9888e-01, 6.0397e-01, - 9.4930e-01, 1.5247e-02, 7.5542e-01, 8.3683e-01, - 8.1190e-01, 5.5766e-01, 3.1907e-03, 2.4997e-02, - 5.9621e-01, 6.8092e-01, 7.4604e-01, 3.6392e-01, - 7.3749e-01, 7.5782e-01, 9.0594e-01, 3.8310e-02, - 7.0973e-01, 2.0418e-03, 1.5030e-01, 6.4446e-01, - 2.9902e-01, 3.9235e-01, 4.6097e-01, 9.6647e-01, - 7.6297e-01, 6.5696e-01, 9.6598e-01, 2.9419e-01, - 7.4848e-01, 5.4178e-01, 8.6408e-01, 3.0556e-01, - 9.5660e-02, 9.8731e-01, 2.3206e-01, 3.0445e-01, - 2.6357e-01, 8.1990e-01, 4.5005e-01, 4.1712e-01, - 4.7212e-01, 9.0916e-01, 6.7266e-01, 8.1816e-01, - 5.4044e-01, 3.8804e-01, 3.8725e-01, 9.6701e-01, - 9.6611e-01, 5.3198e-01, 4.3312e-01, 3.1391e-01, - 7.3887e-01, 4.4383e-01, 1.0121e-01, 2.9852e-01, - 9.4549e-01, 2.7228e-02, 5.7431e-01, 2.2691e-01, - 6.5419e-01, 3.0407e-01, 1.7702e-01, 8.9772e-01, - 5.5873e-01, 2.6340e-01, 7.9858e-01, 8.4542e-01, - 5.6910e-01, 9.8542e-03, 9.3160e-01, 7.7917e-01, - 4.4325e-01, 4.6030e-01, 3.1062e-01, 5.2371e-01, - 3.6051e-01, 6.6182e-01, 7.9523e-01, 7.4930e-01, - 8.5091e-01, 1.0500e-01, 9.1884e-01, 6.4083e-01, - 6.1612e-01, 6.9416e-01, 3.3945e-01, 6.8962e-01, - 5.5544e-01, 7.6810e-01, 2.7725e-02, 2.1387e-01, - 6.7922e-01, 6.5621e-01, 5.8679e-01, 3.5568e-01, - 3.4572e-01, 6.7690e-01, 2.0641e-02, 5.9726e-01, - 9.7926e-01, 7.6343e-01, 6.6564e-01, 7.0179e-01, - 7.5190e-01, 1.6662e-01, 6.3861e-01, 8.6067e-01, - 4.9108e-01, 1.6626e-01, 2.8908e-01, 8.6825e-01, - 2.2293e-01, 6.4829e-01, 9.6432e-01, 3.1724e-01, - 6.7990e-01, 3.1823e-01, 6.4497e-01, 2.9139e-01, - 7.7949e-01, 8.9843e-01, 7.5448e-01, 8.9106e-01, - 2.3395e-02, 6.9438e-02, 5.5248e-01, 6.4159e-01, - 7.5739e-01, 5.0563e-01, 2.0469e-01, 5.6201e-01, - 1.6082e-01, 2.6184e-01, 9.9103e-01, 3.2170e-01, - 6.8832e-01, 8.7008e-01, 2.2665e-01, 7.3082e-01, - 6.2194e-01, 9.0876e-02, 3.0153e-01, 4.0718e-01, - 6.1137e-01, 6.7016e-01, 4.5055e-01, 2.4307e-01, - 8.2010e-01, 9.4870e-01, 5.4014e-01, 1.8658e-01, - 2.9253e-01, 5.5667e-01, 5.6304e-01, 5.0195e-01, - 7.6675e-01, 3.8278e-01, 3.9606e-01, 2.2406e-01, - 5.1845e-01, 3.4771e-01, 1.0772e-01, 8.8694e-01, - 4.5765e-01, 8.5899e-01, 5.2209e-01, 2.6814e-01, - 5.9179e-01, 4.5857e-01, 5.5010e-01, 1.5982e-01, - 3.6937e-01, 7.2005e-01, 9.3900e-01, 9.7016e-01, - 8.1019e-01, 4.1027e-01, 3.1753e-01, 5.0028e-01, - 8.1838e-01, 5.3940e-03, 3.0170e-01, 1.2265e-01, - 5.4341e-01, 7.3566e-01, 8.7193e-01, 3.7031e-03, - 8.0014e-01, 9.7195e-01, 8.1259e-01, 5.2355e-01, - 7.6219e-01, 5.2540e-01, 1.8142e-01, 3.3116e-01, - 6.0314e-01, 4.4442e-01, 1.1620e-01, 7.2375e-01, - 9.5370e-03, 5.4697e-01, 8.8240e-01, 6.6165e-01, - 7.6202e-02, 8.3667e-01, 2.1379e-01, 4.3083e-02, - 6.9313e-01, 4.5761e-01, 7.4008e-01, 1.8568e-01, - 6.9331e-01, 9.7012e-01, 6.0634e-01, 9.1290e-01, - 9.6677e-01, 6.0285e-01, 7.8798e-01, 4.4562e-01, - 9.8873e-01, 7.8753e-01, 8.4151e-01, 8.1693e-01, - 9.9477e-01, 3.4089e-01, 9.8351e-01, 1.7497e-01, - 1.9361e-01, 2.2489e-01, 5.4954e-02, 8.8049e-04, - 5.9184e-03, 2.1956e-01, 5.7859e-01, 5.6967e-02, - 8.3599e-01, 3.4445e-01, 8.6109e-01, 7.3992e-01, - 7.5953e-01, 5.6059e-01, 6.7471e-01, 6.6328e-02, - 1.5833e-01, 8.0087e-02, 3.7828e-01, 2.3249e-01, - 6.4356e-01, 8.9505e-01, 4.6458e-01, 2.4000e-01, - 2.6068e-01, 2.4811e-01, 
2.0334e-01, 6.3954e-02, - 8.7523e-01, 3.8213e-01, 2.3114e-01, 9.2747e-01, - 5.2709e-01, 7.3354e-01, 6.9112e-02, 1.0962e-01, - 8.7768e-01, 3.2023e-01, 6.6020e-01, 8.1433e-01, - 6.3947e-02, 2.0035e-01, 6.9677e-01, 2.2128e-01, - 2.9268e-03, 8.2932e-01, 5.1282e-01, 3.5723e-01, - 8.8333e-01, 5.5722e-01, 5.3919e-01, 1.9816e-01, - 1.7851e-01, 7.3178e-01, 8.1068e-01, 1.8684e-01, - 7.6447e-01, 7.5820e-01, 4.8014e-01, 6.4445e-01, - 1.4520e-01, 3.2844e-01, 9.5413e-01, 3.8770e-01, - 6.9308e-01, 1.3905e-01, 7.6471e-01, 8.8390e-02, - 9.1643e-01, 3.2628e-01, 1.9768e-01, 3.9635e-01, - 7.9628e-02, 7.4905e-01, 7.8489e-01, 8.1331e-01, - 7.9842e-01, 2.5383e-01, 1.3706e-01, 9.4530e-01, - 9.0168e-01, 8.2331e-01, 9.3669e-01, 8.7520e-01, - 7.8717e-01, 7.1291e-02, 6.0640e-01, 7.5422e-01, - 8.9628e-01, 3.7460e-01, 1.4416e-01, 9.2438e-01, - 2.7479e-01, 3.2385e-01, 2.7961e-01, 7.5659e-02, - 5.7388e-01, 3.8352e-01, 2.7746e-01, 2.8999e-01, - 5.0480e-01, 9.5417e-01, 4.2093e-01, 7.8579e-02, - 8.2103e-02, 5.4792e-01, 9.2395e-01, 4.2211e-01, - 3.9568e-01, 7.4897e-01, 6.2380e-01, 4.3780e-01, - 5.1349e-01, 7.4234e-01, 5.1775e-01, 9.9153e-01, - 1.4757e-01, 9.3558e-01, 6.3949e-02, 9.7963e-01, - 5.1970e-01, 8.2542e-01, 3.3289e-01, 3.3816e-01, - 2.3966e-01, 9.0664e-01, 7.4839e-01, 2.4259e-01, - 3.2730e-01, 5.5331e-01, 6.0686e-01, 1.2405e-01, - 7.5585e-01, 5.2917e-02, 2.4485e-01, 4.4680e-01, - 8.6753e-01, 1.7906e-02, 9.9252e-01, 1.8042e-01, - 9.9853e-02, 1.3381e-01, 2.0378e-01, 3.5700e-01, - 3.9791e-01, 2.3120e-01, 4.6213e-01, 6.2256e-01, - 3.6533e-01, 9.9886e-01, 1.9408e-02, 8.2334e-01, - 1.5185e-01, 5.4053e-01, 7.5198e-01, 6.8683e-01, - 5.8939e-01, 6.5246e-01, 5.6503e-02, 1.3252e-01, - 2.3554e-01, 3.6623e-01, 1.8050e-01, 8.3236e-01, - 4.6166e-01, 1.2595e-02, 8.1989e-01, 5.7657e-01, - 6.5694e-01, 9.1911e-01, 7.3100e-01, 8.1813e-02, - 4.5850e-01, 4.8408e-01, 9.3086e-02, 1.2574e-01, - 2.8876e-02, 9.5164e-01, 1.4650e-01, 4.1090e-01, - 9.2514e-01, 6.1633e-01, 1.0809e-01, 5.0875e-01, - 1.3530e-02, 3.6496e-01, 6.6914e-01, 8.9151e-01, - 8.2377e-01, 5.2662e-01, 3.2652e-02, 2.7964e-01, - 4.7944e-01, 1.5140e-01, 8.4471e-01, 2.6876e-01, - 3.2371e-01, 6.9635e-01, 8.6435e-01, 4.4276e-01, - 8.9744e-01, 6.4418e-02, 7.8116e-01, 6.8714e-01, - 1.3143e-01, 1.5614e-01, 1.8519e-01, 2.6732e-01, - 6.0465e-01, 3.3625e-01, 4.9463e-01, 8.6001e-01, - 9.9209e-01, 5.3462e-01, 7.9512e-01, 6.5280e-02, - 6.7335e-01, 7.7646e-02, 4.3713e-01, 5.3228e-02, - 7.0476e-02, 3.6280e-01, 1.3298e-01, 4.7192e-01, - 4.1713e-01, 9.6744e-01, 7.2283e-01, 8.1171e-01, - 5.8368e-01, 2.1220e-01, 2.6946e-01, 6.8910e-01, - 3.9849e-01, 2.7316e-01, 4.4482e-01, 8.1241e-02, - 1.9653e-01, 9.6384e-01, 1.1372e-01, 1.9097e-01, - 7.6998e-01, 9.9375e-01, 1.4568e-02, 9.0302e-01, - 4.8512e-01, 9.9514e-01, 9.1484e-01, 9.2485e-02, - 9.0416e-01, 4.7279e-01, 4.9457e-01, 5.4557e-01, - 4.6614e-01, 7.4250e-01, 1.4241e-02, 8.0018e-02, - 7.8231e-01, 4.7850e-02, 3.7642e-01, 5.9940e-01, - 5.9926e-01, 2.0479e-01, 8.6681e-01, 4.4220e-02, - 1.0096e-01, 2.3569e-02, 1.8524e-01, 5.1257e-01, - 5.2509e-01, 8.0403e-01, 1.2724e-01, 7.1898e-02, - 8.3280e-01, 7.8982e-01, 9.2134e-01, 1.2335e-02, - 6.7136e-02, 8.5018e-01, 4.7597e-01, 7.4717e-01, - 6.2179e-02, 4.4811e-01, 8.3920e-01, 6.0345e-01, - 8.1684e-01, 6.9675e-01, 9.9445e-01, 2.7290e-01, - 1.2717e-01, 4.1549e-02, 9.2287e-01, 4.5480e-01, - 5.5821e-01, 9.2880e-01, 5.5301e-01, 5.4505e-01, - 3.7060e-01, 2.4044e-01, 7.1787e-01, 6.8616e-01, - 4.4501e-01, 6.3975e-02, 1.2135e-01, 3.1465e-01, - 7.8125e-01, 6.9943e-01, 9.7250e-01, 6.1706e-01, - 2.0429e-01, 4.2341e-02, 
8.0929e-01, 9.7416e-01, - 6.1808e-01, 7.7504e-01, 6.7942e-01, 2.7196e-02, - 7.9603e-02, 3.2581e-01, 6.0908e-02, 1.5004e-02, - 5.8326e-01, 9.4906e-01, 2.3072e-01, 3.5085e-01, - 9.4853e-01, 7.2332e-01, 1.1940e-01, 4.9236e-01, - 7.7478e-01, 4.5736e-01, 6.2936e-01, 3.5934e-01, - 1.0097e-01, 8.1085e-01, 2.4341e-01, 3.2735e-01, - 3.3189e-01, 5.8497e-01, 1.7734e-01, 6.3375e-01, - 1.4097e-01, 1.5644e-01, 8.1609e-01, 7.2549e-01, - 5.9378e-02, 1.5413e-01, 5.3232e-02, 6.1339e-01, - 7.0552e-01, 7.7829e-01, 7.5404e-01, 7.7450e-01, - 8.7883e-01, 5.6085e-01, 4.0932e-01, 8.9438e-01, - 9.1787e-02, 8.4228e-01, 3.0927e-01, 1.6037e-01, - 2.2670e-01, 7.6787e-01, 1.4606e-01, 2.3816e-01, - 3.4066e-01, 2.0990e-01, 5.8881e-01, 8.9502e-01, - 4.3356e-01, 9.0184e-01, 6.5518e-01, 4.6940e-01, - 5.7142e-01, 9.6034e-01, 2.1592e-01, 7.5007e-01, - 3.7195e-01, 3.4826e-01, 1.3116e-01, 8.3464e-01, - 6.8307e-01, 9.5445e-01, 8.1954e-01, 7.2306e-01, - 2.5420e-01, 1.3167e-01, 6.4915e-01, 5.6853e-01, - 9.0585e-02, 1.3848e-01, 8.8329e-01, 8.4826e-01, - 2.1122e-01, 2.4577e-01, 6.3388e-01, 5.1270e-01, - 5.1034e-01, 5.0535e-01, 7.7389e-01, 7.8660e-01, - 8.3881e-01, 4.0781e-01, 7.3903e-01, 4.6446e-03, - 3.7737e-01, 5.3757e-01, 6.2755e-01, 2.1755e-01, - 6.0600e-01, 6.8931e-01, 7.2083e-01, 8.5321e-01, - 7.4348e-01, 4.7003e-01, 8.0017e-01, 2.0961e-01, - 3.9155e-01, 4.7019e-01, 8.2793e-01, 1.0848e-01, - 5.0885e-01, 4.4031e-01, 4.3596e-01, 6.5756e-01, - 6.3068e-01, 3.1965e-02, 6.1304e-01, 6.2773e-01, - 7.5422e-01, 2.1203e-01, 3.7413e-01, 8.1662e-01, - 2.6392e-01, 6.2770e-01, 3.1850e-01, 8.8449e-01, - 4.1231e-01, 7.3768e-01, 6.2162e-01, 6.3949e-02, - 8.6686e-01, 7.9535e-01, 1.7100e-01, 9.0592e-01, - 7.7475e-01, 2.2756e-02, 1.9513e-01, 2.3374e-01, - 5.8883e-01, 1.5431e-01, 3.2038e-01, 9.7862e-01, - 2.1914e-02, 1.1835e-02, 4.2111e-01, 8.9160e-01, - 4.6853e-01, 6.7863e-01, 8.5162e-01, 8.9675e-02, - 8.4382e-01, 8.6282e-01, 2.6677e-01, 1.6343e-01, - 4.8528e-02, 9.4412e-01, 6.3315e-01, 4.3330e-02, - 4.6024e-01, 4.2822e-02, 6.1742e-01, 9.5830e-01, - 9.5318e-01, 8.0255e-01, 3.3261e-01, 4.8987e-01, - 5.2325e-01, 4.5380e-01, 1.7553e-01, 3.8748e-01, - 3.4164e-02, 5.6323e-01, 6.9522e-01, 1.2731e-01, - 2.0882e-01, 2.9892e-01, 4.1201e-01, 1.0616e-02, - 8.2951e-01, 1.5012e-01, 2.5441e-01, 5.8813e-01, - 6.0251e-01, 3.5572e-01, 1.3375e-01, 7.4197e-01, - 7.6556e-01, 9.4426e-01, 1.6704e-01, 4.2013e-01, - 9.9049e-01, 5.8137e-01, 6.4977e-01, 8.2789e-01, - 2.1064e-01, 7.6906e-01, 6.0039e-01, 5.7468e-01, - 6.0443e-02, 5.0163e-01, 3.7936e-01, 1.3277e-01, - 6.4513e-01, 5.0021e-01, 9.3869e-01, 2.2699e-01, - 8.5101e-01, 9.1350e-01, 3.2096e-01, 6.4035e-01, - 2.1549e-01, 3.9428e-01, 2.3003e-01, 6.3791e-02, - 5.3362e-02, 2.8508e-01, 5.1790e-01, 2.7213e-01, - 8.4037e-01, 3.8949e-03, 2.2458e-02, 6.6033e-01, - 8.4001e-01, 8.8969e-01, 9.5652e-01, 4.0968e-01, - 5.6649e-01, 3.8455e-01, 7.7402e-01, 7.7569e-01, - 6.9995e-01, 5.3609e-01, 4.7322e-01, 2.5078e-01, - 3.1545e-01, 2.3820e-01, 2.6935e-01, 5.4291e-01, - 9.8952e-02, 4.1834e-01, 6.8148e-01, 3.4508e-01, - 5.6829e-01, 9.5116e-01, 7.0814e-01, 7.8640e-01, - 9.8928e-01, 9.4988e-01, 1.3932e-01, 8.0564e-01, - 5.7439e-01, 3.3367e-02, 6.8127e-01, 9.8039e-01, - 9.7767e-01, 8.0552e-02, 5.3572e-01, 5.2619e-01, - 3.8475e-01, 3.3846e-01, 8.3436e-01, 9.1328e-01, - 3.3929e-01, 5.4203e-01, 5.6960e-01, 2.7232e-01, - 1.3736e-01, 5.3665e-01, 4.1730e-01, 5.5670e-01, - 3.2961e-01, 7.4186e-01, 7.7502e-01, 1.9667e-01, - 1.7768e-01, 2.1674e-01, 5.9639e-02, 1.5805e-01, - 6.5950e-01, 7.7801e-01, 1.1876e-01, 2.8471e-02, - 8.1482e-02, 6.1569e-01, 
7.2271e-01, 4.3063e-01, - 3.3714e-01, 5.9950e-01, 2.4036e-01, 9.4548e-01, - 6.8225e-02, 3.8594e-01, 5.5316e-01, 8.3555e-01, - 7.0504e-01, 6.3175e-02, 9.5028e-01, 5.9192e-01, - 8.3808e-01, 9.3302e-01, 4.6343e-01, 5.1120e-01, - 3.2675e-01, 8.7011e-01, 9.8451e-01, 3.3422e-01, - 9.2713e-01, 2.0238e-01, 9.1217e-01, 5.1309e-01, - 6.2678e-01, 1.1738e-01, 1.2722e-03, 4.9728e-01, - 9.3930e-01, 8.9876e-01, 3.3336e-01, 6.4927e-01, - 5.3282e-01, 1.7894e-01, 4.5240e-01, 6.4025e-01, - 5.8421e-01, 3.9615e-01, 3.3808e-01, 8.3123e-01, - 6.8721e-01, 7.6244e-01, 4.4166e-01, 1.7219e-01, - 5.6471e-01, 9.1698e-01, 3.6272e-01, 4.0208e-01, - 4.7886e-01, 6.1182e-01, 9.0649e-01, 7.3848e-01, - 1.7994e-01, 1.0582e-01, 7.1936e-01, 9.9727e-01, - 5.8436e-01, 8.9529e-01, 1.4226e-03, 5.5845e-01, - 1.2202e-01, 4.2654e-01, 7.4178e-02, 3.6640e-01, - 8.3884e-01, 1.1291e-01, 8.7969e-01, 1.4058e-01, - 8.6455e-02, 8.3188e-01, 1.8866e-01, 9.3291e-01, - 8.1895e-01, 7.6617e-01, 3.9340e-01, 5.2325e-01, - 8.5361e-01, 1.4133e-01, 1.4430e-01, 7.9132e-01]), + col_indices=tensor([8818, 4997, 6295, 3180, 5518, 5172, 746, 5244, 2210, + 743, 966, 3996, 5220, 1403, 3538, 8509, 4502, 3785, + 3874, 817, 2474, 8625, 1826, 7378, 3372, 3487, 3692, + 2823, 4014, 5568, 2853, 458, 6380, 403, 9884, 6452, + 9461, 3174, 4128, 3727, 4746, 3692, 3559, 764, 7725, + 9740, 2443, 7797, 959, 9783, 3882, 305, 8658, 3439, + 5219, 6204, 295, 2674, 5653, 2515, 9433, 6942, 4787, + 2622, 8901, 7171, 1978, 5705, 8547, 5754, 1645, 8716, + 7164, 3964, 7058, 652, 9812, 2558, 4701, 5177, 98, + 4410, 1873, 4795, 9496, 1552, 8229, 5835, 111, 9027, + 4842, 5493, 1576, 7272, 2867, 9784, 7469, 4609, 150, + 9289, 6828, 2031, 511, 5367, 206, 9469, 1196, 6, + 6915, 5850, 9888, 478, 1163, 4552, 2977, 4780, 2098, + 8590, 2673, 6656, 6488, 6316, 4862, 2997, 7612, 1952, + 2112, 7509, 4782, 1126, 7937, 6476, 235, 4623, 3076, + 3989, 6113, 2417, 6645, 7779, 1104, 5344, 4301, 6259, + 1752, 8337, 8322, 7835, 1908, 7672, 4448, 5034, 60, + 7057, 2837, 8532, 8189, 5873, 1420, 6709, 490, 6751, + 8214, 372, 7996, 3737, 3617, 4217, 2087, 3924, 715, + 7031, 927, 4709, 9269, 6784, 9714, 1522, 6663, 5719, + 4381, 6868, 7211, 9670, 8059, 6497, 7248, 8162, 2084, + 7034, 9399, 6368, 805, 1972, 1288, 6192, 6971, 8158, + 839, 2511, 8665, 9067, 2039, 5911, 3156, 7465, 1110, + 1361, 5583, 6290, 6815, 9577, 8273, 5080, 5829, 4439, + 4517, 7692, 6404, 3416, 4236, 9336, 4572, 6801, 7025, + 3277, 3132, 7625, 3681, 2623, 3841, 8373, 5724, 5264, + 4286, 5692, 9344, 6955, 4289, 9500, 6700, 8199, 366, + 3802, 521, 2483, 8095, 5552, 6282, 9877, 4904, 2556, + 5298, 3693, 2380, 4385, 4412, 1834, 9121, 5659, 8347, + 2368, 6268, 3224, 9192, 3527, 71, 5299, 3747, 9127, + 7913, 3584, 6170, 5219, 9969, 8558, 7471, 1869, 6249, + 6314, 8214, 9604, 4989, 9678, 9005, 899, 327, 9154, + 420, 7636, 9194, 2023, 6652, 3649, 2734, 7531, 6034, + 6367, 2297, 2439, 8543, 8665, 2261, 7803, 6026, 5718, + 1460, 3052, 5066, 4649, 5088, 3888, 7634, 3125, 1031, + 357, 4186, 379, 1160, 3285, 8329, 8278, 5375, 6589, + 3332, 2502, 5635, 2777, 4131, 8236, 5364, 4119, 4773, + 7426, 3813, 7568, 4300, 251, 7007, 6592, 520, 5616, + 184, 5912, 159, 2430, 7561, 1581, 8783, 1948, 4824, + 8861, 1027, 7384, 7064, 3762, 9883, 5055, 9889, 2136, + 6878, 134, 439, 8931, 7319, 4846, 7379, 9994, 2529, + 7477, 8567, 4006, 6282, 7117, 311, 9326, 7719, 5997, + 9451, 2406, 6518, 3299, 9691, 2738, 7672, 4868, 7911, + 5072, 1199, 7673, 5307, 9429, 3333, 4281, 9947, 8085, + 8514, 3607, 8741, 6796, 1858, 1016, 1779, 480, 6356, + 7534, 7821, 5353, 6971, 7947, 
3649, 3547, 6610, 7945, + 8983, 2902, 2404, 3640, 3470, 7115, 4331, 3001, 7217, + 6067, 6675, 9033, 113, 8511, 2978, 3828, 5547, 6128, + 5943, 9133, 7396, 340, 5013, 3866, 5429, 3843, 4232, + 4224, 5836, 9527, 5393, 5619, 3765, 7060, 7312, 8751, + 9882, 3644, 2436, 1894, 9420, 4432, 3471, 689, 2661, + 8498, 9018, 5541, 8913, 8885, 2798, 7235, 4793, 2889, + 22, 4074, 7934, 4211, 9089, 1688, 2377, 3141, 1578, + 736, 6921, 8872, 2373, 2510, 5342, 5747, 7007, 5681, + 4089, 3866, 3383, 4848, 2721, 2600, 8073, 2987, 4596, + 8224, 7575, 1778, 7724, 7661, 9433, 1516, 5355, 8800, + 9206, 6526, 8742, 5734, 1218, 9890, 6353, 6953, 9936, + 5027, 6686, 7991, 7405, 4870, 3524, 2831, 4892, 3114, + 8610, 5079, 7688, 3999, 6035, 4304, 578, 5348, 8238, + 6971, 6365, 7448, 5465, 1168, 9574, 125, 360, 8670, + 1031, 3631, 9612, 6050, 4553, 5025, 7956, 621, 7664, + 5616, 5980, 3672, 5851, 1472, 5912, 974, 7527, 6936, + 3930, 214, 2108, 966, 7534, 3091, 9626, 1342, 2734, + 6495, 7050, 5771, 9438, 4075, 4885, 2053, 5699, 1233, + 1947, 9922, 2071, 7358, 4113, 4493, 1101, 5450, 1849, + 5005, 4209, 7972, 9327, 8652, 1919, 9648, 2341, 6003, + 5957, 8870, 4062, 7506, 7950, 234, 5860, 9966, 6514, + 573, 417, 3838, 134, 311, 3466, 651, 6202, 7365, + 7013, 2031, 8636, 3247, 4845, 4825, 8910, 614, 665, + 3996, 6221, 9001, 1244, 1489, 3803, 788, 9532, 7970, + 902, 9287, 5113, 9135, 8459, 3179, 9001, 7566, 887, + 7657, 5624, 5814, 4435, 543, 3492, 150, 4412, 7874, + 9601, 4359, 2434, 2790, 8199, 7856, 9975, 7047, 450, + 7905, 9286, 7834, 1567, 5836, 541, 7574, 6346, 2293, + 1724, 4573, 8919, 2574, 7423, 9764, 900, 9870, 4793, + 6813, 5501, 6220, 5209, 4493, 9942, 3899, 1850, 9402, + 4899, 967, 1495, 5010, 551, 8509, 1348, 6653, 5443, + 38, 5351, 7236, 6333, 2748, 6974, 5517, 5271, 2671, + 2099, 2279, 5546, 7405, 6267, 4157, 6136, 7672, 7732, + 2068, 5252, 730, 2034, 517, 6990, 3368, 1064, 8937, + 3732, 9471, 232, 46, 2596, 29, 6547, 1283, 4106, + 3755, 441, 8420, 2767, 4347, 8108, 6894, 3712, 9224, + 5559, 3154, 1897, 9474, 5775, 9076, 9330, 6478, 4842, + 5819, 4627, 3308, 8268, 8302, 7939, 3972, 2816, 1671, + 654, 3190, 1461, 8667, 2709, 4129, 6596, 5313, 8247, + 8111, 6549, 9761, 4895, 7369, 7306, 5691, 6366, 6366, + 1632, 1006, 1565, 8757, 8307, 7957, 6655, 8497, 6101, + 789, 5192, 8243, 4023, 8662, 8468, 184, 4890, 8196, + 4665, 195, 8889, 4145, 6127, 6961, 1552, 9768, 9245, + 9336, 2960, 3225, 8928, 1942, 7053, 4234, 7272, 4267, + 2453, 9231, 6135, 9573, 8580, 5701, 9618, 625, 1243, + 8497, 1674, 290, 8086, 6464, 782, 4299, 375, 1982, + 1301, 2328, 2966, 9273, 2957, 5839, 2534, 9510, 3891, + 6678, 118, 6417, 2293, 7076, 8464, 8118, 1840, 6882, + 1749, 2518, 1180, 5249, 3941, 7393, 4780, 2412, 1116, + 3560, 4248, 9195, 3261, 7262, 8391, 4018, 9884, 3637, + 708, 6472, 8166, 9273, 6239, 3301, 2615, 8039, 9504, + 2277, 7369, 9363, 169, 2266, 7615, 341, 752, 1755, + 3496, 7393, 1717, 3157, 4288, 6859, 4916, 1821, 2886, + 3627, 7457, 2496, 3885, 6234, 4545, 6510, 9711, 3708, + 9103, 6471, 9205, 9987, 390, 8226, 9671, 9512, 4146, + 2072, 4493, 8377, 7389, 6002, 5643, 6690, 6821, 7003, + 639, 833, 3000, 7750, 1465, 2933, 1680, 1789, 3599, + 7744, 1866, 406, 4679, 8328, 3373, 8553, 137, 3824, + 647, 9373, 6325, 3601, 826, 7215, 3418, 1045, 2194, + 8870, 2266, 5573, 2823, 855, 4619, 4378, 9555, 8804, + 1524, 5955, 362, 470, 9979, 9421, 5896, 6593, 8227, + 1504, 9829, 7218, 9534, 7521, 4807, 5986, 9951, 2099, + 1648, 4145, 9786, 8587, 4042, 9042, 868, 4796, 7815, + 9322, 5225, 6497, 1773, 8345, 1473, 4874, 5455, 8676, + 956, 
3326, 6466, 4895, 6950, 9662,  956, 5289,  402,
+                           9854]),
+       values=tensor([3.3413e-01, 9.6154e-01, 9.2428e-01,  ...,
+                      6.0896e-01, 8.5559e-01, 8.0240e-01]),
       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.0633, 0.2712, 0.1613,  ..., 0.7795, 0.8074, 0.9414])
+tensor([0.7941, 0.9355, 0.0308,  ..., 0.8188, 0.6700, 0.4642])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -1133,375 +375,378 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 10.386851072311401 seconds
+Time: 0.06352877616882324 seconds
+
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '165279', '-ss', '10000', '-sd', '1e-05', '-c', '1']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.6851487159729}
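The invocation above ran 165279 iterations in about 7.69 s; the follow-up below requests 225815, consistent with the calibrate-then-measure pattern these logs appear to follow: a trial run's measured TIME_S is used to rescale the iteration count so the next run lasts a fixed wall-clock target. A minimal sketch of that rescaling, assuming a ~10.5 s target inferred from the logged times; the function and argument names are illustrative, not taken from spmv.py or batch.py:

# Hypothetical reconstruction of the iteration rescaling the paired
# invocations suggest; target_s is inferred from the logged times,
# not read from batch.py.
def rescale_iterations(trial_iters: int, measured_s: float,
                       target_s: float = 10.5) -> int:
    """Scale a trial iteration count so the next run lasts ~target_s."""
    return int(trial_iters * target_s / measured_s)

# From the log above: 165279 iterations ran in ~7.685 s, so the next
# invocation requests roughly 225815 iterations.
print(rescale_iterations(165279, 7.6851487159729))  # ~225815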

/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    1,    1,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([5090, 5520, 6854,  ..., 1635, 9826,  113,
-                           951]),
-       values=tensor([8.6107e-01, 6.9271e-01, 4.8368e-01,  ...,
-                      1.4133e-01, 1.4430e-01, 7.9132e-01]),
+tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
+       col_indices=tensor([2315, 6513, 5907,  ..., 9850, 4272, 5130,
+                           2209]),
+       values=tensor([8.6258e-01, 7.0528e-01, 8.9060e-01,  ...,
+                      5.3074e-01, 3.6413e-01, 2.2481e-01]),
       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.0633, 0.2712, 0.1613,  ..., 0.7795, 0.8074, 0.9414])
+tensor([0.5425, 0.1860, 0.7804,  ..., 0.3375, 0.7494, 0.4073])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -1509,13 +754,554 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 10.386851072311401 seconds
+Time: 7.6851487159729 seconds
-[18.39, 17.96, 17.95, 18.06, 18.03, 17.74, 18.17, 18.27, 17.97, 17.88]
-[46.24]
-14.305678367614746
-{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 225343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.386851072311401, 'TIME_S_1KI': 0.04609351553991649, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.4945677185059, 'W': 46.24}
-[18.39, 17.96, 17.95, 18.06, 18.03, 17.74, 18.17, 18.27, 17.97, 17.88, 18.01, 17.95, 18.03, 17.68, 17.83, 17.84, 17.98, 17.81, 17.89, 18.12]
-323.35999999999996
-16.168
-{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 225343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.386851072311401, 'TIME_S_1KI': 0.04609351553991649, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.4945677185059, 'W': 46.24, 'J_1KI': 2.935500848566434, 'W_1KI': 0.20519829770616352, 'W_D': 30.072000000000003, 'J_D': 430.2003598709107, 'W_D_1KI': 0.1334498963801849, 'J_D_1KI': 0.0005922078625925141}
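The removed lines above are the raw power trace for the old run: ten idle wattage samples, the load wattage, the measurement window in seconds, and two dictionaries of derived fields, the second preceded by twenty idle samples and two scalars whose ratio (323.36 / 20 = 16.168 W) is the idle power used for the dynamic-power fields. The energy fields follow by simple arithmetic; here is a hedged reconstruction, where the variable names are mine and the meaning of each raw line is inferred from the numbers rather than read from the instrumentation code:

# Inferred derivation of the energy fields in the removed dicts above;
# every value checks against the logged output.
load_w = 46.24                       # logged average power under load (W)
duration_s = 14.305678367614746      # logged measurement window (s)
idle_w = 323.35999999999996 / 20     # logged idle sum / 20 -> 16.168 W
iters_k = 225343 / 1000              # logged ITERATIONS, in thousands

j_total = load_w * duration_s        # 'J'     = 661.4945677185059
w_dynamic = load_w - idle_w          # 'W_D'   = 30.072000000000003
j_dynamic = w_dynamic * duration_s   # 'J_D'   = 430.2003598709107
j_per_ki = j_total / iters_k         # 'J_1KI' = 2.935500848566434
w_per_ki = load_w / iters_k          # 'W_1KI' = 0.20519829770616352

One oddity worth flagging: the logged J_D_1KI (0.000592...) equals W_D_1KI divided by ITERATIONS/1000 a second time rather than J_D / (ITERATIONS/1000), which suggests a scaling slip in the script that emits these fields.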
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '225815', '-ss', '10000', '-sd', '1e-05', '-c', '1']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.501307487487793}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+  matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
+       col_indices=tensor([ 621, 8067, 8701,  ..., 8168, 9279,  897,
+                           4170]),
+       values=tensor([0.4878, 0.2366, 0.8733,  ..., 0.1096, 0.3360,
+                      0.7261]),
+       size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
+tensor([0.5928, 0.5923, 0.2769,  ..., 0.7868, 0.2495, 0.0989])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([10000, 10000])
+Rows: 10000
+Size: 100000000
+NNZ: 1000
+Density: 1e-05
+Time: 10.501307487487793 seconds
+
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([ 621, 8067, 8701, 6486, 5538, 5824, 379, 1918, 5000, + 5124, 6265, 1757, 7171, 5785, 2098, 8110, 8680, 9293, + 3536, 8102, 4182, 8879, 9877, 2040, 7911, 510, 3802, + 7722, 6811, 1404, 2410, 8431, 3523, 6495, 6498, 6685, + 7173, 7872, 4534, 9047, 7100, 8447, 6072, 5630, 5799, + 190, 6891, 1441, 9822, 4335, 8399, 1784, 1404, 5633, + 6623, 2518, 6475, 3954, 4736, 1500, 5281, 4391, 6371, + 886, 805, 6503, 5528, 1428, 6887, 8163, 4623, 5541, + 4640, 3383, 6444, 4711, 4505, 3203, 7934, 4654, 687, + 7329, 1943, 6395, 8455, 1952, 3346, 9199, 2955, 3712, + 7082, 8540, 6711, 1353, 3492, 6382, 9227, 3128, 4738, + 7860, 8372, 15, 1552, 8319, 9811, 3777, 9596, 8620, + 4064, 4884, 9629, 5329, 7715, 7613, 6097, 4214, 6601, + 8769, 7774, 2256, 3188, 9906, 6088, 7859, 2481, 3977, + 5219, 4949, 6267, 3615, 4199, 9995, 444, 6572, 6590, + 7908, 8736, 1682, 7932, 9231, 9498, 4969, 8032, 7386, + 3757, 5388, 5500, 7007, 497, 6899, 924, 5470, 4242, + 8349, 988, 321, 277, 6809, 2621, 4996, 3392, 2070, + 8599, 2865, 1951, 2114, 8145, 435, 2953, 956, 2814, + 1486, 8770, 3701, 8858, 3904, 8946, 1921, 2720, 8762, + 9358, 9717, 5029, 4186, 4041, 3158, 5636, 5605, 4525, + 5593, 7451, 6146, 5196, 850, 4025, 7864, 9672, 7108, + 777, 2450, 9967, 3039, 7352, 7620, 5364, 1636, 7924, + 9792, 4541, 3656, 1580, 8687, 4603, 7545, 1231, 9649, + 2748, 7212, 6116, 9217, 2342, 1946, 3303, 6323, 5879, + 4477, 5340, 4516, 5722, 3754, 6742, 650, 8207, 6470, + 2264, 3353, 2048, 8251, 400, 1044, 8345, 2532, 2124, + 1395, 5517, 6024, 3422, 7067, 1645, 6318, 3535, 6960, + 308, 4401, 9204, 1112, 4901, 4265, 8909, 888, 7274, + 2714, 4875, 7150, 2797, 4106, 4244, 2138, 3876, 2559, + 3408, 2983, 6763, 1791, 1130, 1328, 3454, 3597, 3047, + 9406, 8234, 2404, 5313, 8028, 8018, 4816, 2500, 8560, + 3279, 4533, 506, 4358, 9720, 9432, 5299, 1497, 4103, + 4459, 2841, 3387, 6518, 8285, 1486, 4206, 317, 3410, + 1281, 6931, 471, 5237, 7799, 9052, 1693, 1883, 977, + 3915, 377, 6991, 2617, 5482, 7380, 2319, 6326, 4913, + 63, 6407, 6245, 8595, 8116, 5131, 5553, 3847, 9485, + 7909, 3795, 8618, 7145, 3635, 6413, 9128, 3130, 8692, + 2576, 4366, 8928, 6249, 7783, 8379, 9553, 1007, 4718, + 1322, 4530, 5651, 4480, 2964, 4421, 6311, 2055, 9796, + 5531, 7518, 7953, 2916, 7603, 6190, 5496, 6074, 3416, + 4536, 338, 5349, 4767, 7727, 61, 2223, 5727, 6702, + 1168, 3309, 6908, 120, 2145, 7426, 217, 6262, 4322, + 710, 1996, 1502, 6744, 3362, 9551, 9541, 4765, 5555, + 7677, 3714, 9116, 9201, 9677, 808, 3444, 7948, 3547, + 3147, 8821, 4131, 7853, 4873, 3727, 8956, 9242, 463, + 6102, 3207, 2647, 2009, 6976, 8113, 5383, 9950, 9210, + 2607, 6196, 3792, 5279, 4469, 6166, 8307, 3139, 8294, + 3045, 8587, 5732, 6420, 4767, 6384, 3262, 6516, 9124, + 5629, 9140, 4268, 7884, 9280, 2845, 4396, 1695, 5892, + 9646, 4870, 474, 711, 5269, 9561, 510, 234, 6036, + 1463, 1833, 5092, 1789, 1299, 8945, 842, 2582, 1165, + 9387, 4647, 18, 2357, 8595, 5475, 9627, 2152, 6592, + 7559, 3616, 6551, 6061, 5016, 351, 6411, 5971, 9815, + 8118, 3528, 9738, 1760, 6048, 7402, 1939, 2804, 8305, + 1191, 7749, 4316, 6651, 179, 6542, 9648, 2265, 7391, + 7426, 6846, 1001, 1073, 1131, 6087, 4908, 4135, 4320, + 7720, 1601, 7467, 9773, 8874, 2360, 6153, 8937, 6481, + 3762, 9196, 7483, 944, 7320, 7745, 860, 3192, 2592, + 5428, 1558, 4118, 6406, 4829, 7167, 5223, 5427, 1349, + 5294, 7920, 7467, 9863, 7414, 1430, 2272, 4749, 9366, + 5823, 7199, 7730, 4037, 4741, 8062, 2210, 
4947, 2286, + 3927, 3530, 6602, 7303, 6039, 6871, 649, 2184, 5850, + 8991, 9883, 1962, 7997, 8066, 2827, 485, 73, 5074, + 4027, 8247, 1956, 466, 6094, 7363, 8807, 3485, 4713, + 8857, 4550, 6148, 5177, 6630, 3729, 5962, 2364, 8027, + 1540, 5512, 8420, 1833, 166, 185, 1717, 5236, 2057, + 1920, 8729, 7603, 7798, 4581, 7903, 2753, 3830, 1012, + 3358, 98, 1431, 3548, 1765, 7173, 2113, 1774, 5074, + 9875, 5748, 6477, 3076, 4013, 5494, 142, 6261, 1546, + 735, 8132, 3760, 3833, 8392, 2866, 3747, 2238, 5039, + 9991, 3930, 3295, 4281, 6582, 1507, 2339, 8308, 4795, + 6042, 9144, 5178, 7200, 1854, 5289, 1036, 263, 4673, + 1755, 7843, 9590, 1674, 1997, 5404, 7842, 2316, 4765, + 903, 4521, 1615, 9734, 2683, 9393, 4743, 465, 2256, + 1490, 4969, 5519, 7553, 9750, 1156, 9506, 2573, 7753, + 8858, 2242, 2353, 4052, 130, 5566, 7279, 9261, 2032, + 5574, 9697, 2307, 6357, 4851, 125, 7010, 9557, 7434, + 1764, 5376, 8681, 520, 1147, 3019, 2766, 4221, 6787, + 913, 9582, 7327, 3426, 4085, 9753, 397, 1757, 2467, + 6219, 9421, 4979, 4880, 9636, 399, 4017, 6742, 7711, + 4977, 597, 3788, 7425, 4538, 5803, 7511, 707, 6862, + 4448, 2935, 796, 5761, 8972, 3222, 5360, 4073, 3780, + 7587, 3935, 3659, 3516, 3305, 6397, 835, 8897, 2260, + 8074, 4151, 59, 9881, 5722, 4749, 4490, 8386, 9531, + 1681, 8322, 7280, 7383, 9110, 9496, 3979, 1667, 1609, + 605, 7186, 3289, 9548, 9178, 3499, 8700, 8958, 6798, + 4712, 6079, 3652, 2893, 6478, 6228, 2105, 1064, 1126, + 594, 1239, 5010, 9616, 2691, 5967, 1503, 1488, 3756, + 3247, 4285, 4420, 2495, 3105, 2726, 8612, 1019, 7638, + 301, 3462, 8224, 7646, 2804, 892, 3360, 9148, 9972, + 4302, 6615, 1094, 5589, 9642, 9897, 7341, 5880, 2250, + 504, 6779, 1200, 8941, 3565, 8375, 7844, 9898, 7773, + 866, 2804, 8221, 7345, 7948, 44, 1443, 2712, 6139, + 27, 5009, 6474, 7863, 6279, 2280, 2088, 3757, 8456, + 8642, 2733, 1757, 7233, 5141, 3949, 8447, 2690, 3294, + 2211, 161, 6191, 1188, 871, 3978, 5524, 3757, 7095, + 4586, 4518, 8935, 4168, 5727, 4325, 5573, 274, 8627, + 5160, 6998, 5604, 9203, 1927, 6912, 3284, 5635, 8095, + 4022, 8206, 1927, 9163, 7500, 7635, 8229, 2377, 4284, + 5090, 9687, 8448, 6150, 8497, 8068, 7265, 2977, 4847, + 9227, 4643, 5971, 3091, 9617, 1463, 3964, 7383, 3704, + 5970, 2528, 4339, 780, 6389, 7456, 1723, 7888, 8852, + 109, 5808, 2711, 1313, 97, 5642, 753, 4358, 9054, + 5886, 9449, 104, 2884, 1499, 7364, 9304, 4311, 9143, + 7421, 2337, 2694, 3377, 3997, 3023, 7414, 4869, 419, + 3257, 224, 4395, 7186, 1420, 2887, 9106, 1717, 5082, + 3894, 8215, 6787, 4497, 3468, 9059, 3488, 9487, 7280, + 6265, 4060, 8912, 5499, 4442, 7535, 6018, 6368, 1204, + 8864, 9258, 6588, 5167, 2209, 2012, 4429, 8470, 8234, + 9211, 4382, 7963, 5391, 591, 4867, 8168, 9279, 897, + 4170]), + values=tensor([0.4878, 0.2366, 0.8733, 0.8071, 0.3473, 0.2387, 0.9164, + 0.3754, 0.7791, 0.5219, 0.2035, 0.6398, 0.4422, 0.3796, + 0.7410, 0.0343, 0.0746, 0.6347, 0.0691, 0.8108, 0.6743, + 0.7644, 0.4125, 0.8362, 0.4857, 0.8892, 0.4751, 0.0020, + 0.5709, 0.6384, 0.1497, 0.3464, 0.3034, 0.5189, 0.2843, + 0.8914, 0.9227, 0.3685, 0.3834, 0.0155, 0.1265, 0.8952, + 0.3893, 0.8670, 0.7098, 0.4601, 0.8885, 0.0046, 0.1082, + 0.5850, 0.3348, 0.6623, 0.6228, 0.0161, 0.7512, 0.8534, + 0.5169, 0.3788, 0.1287, 0.4768, 0.5501, 0.8770, 0.6106, + 0.3540, 0.4183, 0.1505, 0.6415, 0.2760, 0.4263, 0.1294, + 0.2381, 0.3413, 0.7792, 0.3560, 0.7846, 0.6195, 0.9067, + 0.9607, 0.5449, 0.5238, 0.7187, 0.7121, 0.1293, 0.8894, + 0.0511, 0.0830, 0.2825, 0.4900, 0.0409, 0.2002, 0.4854, + 0.3856, 0.8627, 0.8310, 0.5780, 0.7482, 0.3676, 0.5167, + 0.4530, 
0.8058, 0.4658, 0.0519, 0.2134, 0.3241, 0.3833, + 0.1132, 0.1256, 0.5042, 0.0189, 0.1971, 0.1400, 0.7715, + 0.1621, 0.0713, 0.8856, 0.2929, 0.3989, 0.8128, 0.0289, + 0.2640, 0.5075, 0.4705, 0.8865, 0.9171, 0.4145, 0.4919, + 0.4500, 0.5633, 0.3618, 0.0032, 0.3175, 0.9427, 0.8902, + 0.2880, 0.6791, 0.9616, 0.0047, 0.8354, 0.3680, 0.2314, + 0.5566, 0.6728, 0.8761, 0.3109, 0.4191, 0.1257, 0.0231, + 0.4635, 0.8760, 0.8559, 0.1650, 0.1720, 0.8840, 0.1952, + 0.5716, 0.4436, 0.6030, 0.9799, 0.4962, 0.0060, 0.0091, + 0.1820, 0.1058, 0.9674, 0.3332, 0.5543, 0.3055, 0.7098, + 0.5098, 0.5712, 0.3186, 0.2511, 0.9486, 0.2897, 0.7906, + 0.4682, 0.9460, 0.4947, 0.0711, 0.2903, 0.4667, 0.5414, + 0.0305, 0.9032, 0.1444, 0.0549, 0.4534, 0.0693, 0.9432, + 0.7050, 0.0278, 0.6850, 0.1700, 0.0189, 0.3286, 0.1955, + 0.6291, 0.1692, 0.8585, 0.9646, 0.7537, 0.6946, 0.7438, + 0.4102, 0.6268, 0.2998, 0.4067, 0.3522, 0.5813, 0.2936, + 0.9445, 0.1475, 0.9903, 0.8673, 0.8393, 0.5631, 0.2710, + 0.4872, 0.2561, 0.4261, 0.0249, 0.8866, 0.7154, 0.7262, + 0.4255, 0.2060, 0.4915, 0.5716, 0.3734, 0.5359, 0.0582, + 0.7274, 0.3193, 0.1823, 0.9469, 0.1962, 0.8306, 0.0367, + 0.3559, 0.8060, 0.2544, 0.7597, 0.1912, 0.5306, 0.2236, + 0.1306, 0.0906, 0.3830, 0.2840, 0.1455, 0.7884, 0.5093, + 0.2096, 0.7409, 0.5736, 0.0118, 0.2511, 0.1254, 0.6493, + 0.5987, 0.1422, 0.7590, 0.3750, 0.0146, 0.3755, 0.5252, + 0.4470, 0.1730, 0.8157, 0.1781, 0.3561, 0.5010, 0.2990, + 0.7060, 0.9531, 0.0424, 0.8444, 0.0333, 0.9215, 0.3468, + 0.6299, 0.5208, 0.5070, 0.7369, 0.3345, 0.6893, 0.6628, + 0.0479, 0.2533, 0.8987, 0.9263, 0.7389, 0.2881, 0.3437, + 0.4669, 0.0061, 0.2705, 0.7551, 0.5375, 0.9128, 0.4733, + 0.3625, 0.3041, 0.3906, 0.4750, 0.5152, 0.6836, 0.5047, + 0.3035, 0.6463, 0.6891, 0.4384, 0.3322, 0.0632, 0.1828, + 0.0306, 0.6593, 0.7636, 0.2683, 0.3746, 0.6911, 0.2833, + 0.2470, 0.5171, 0.6073, 0.6043, 0.2759, 0.6224, 0.0070, + 0.6025, 0.5674, 0.9167, 0.4212, 0.0308, 0.3773, 0.8746, + 0.6407, 0.5547, 0.0444, 0.6266, 0.5499, 0.1996, 0.9428, + 0.8289, 0.3044, 0.3689, 0.5795, 0.4388, 0.9210, 0.7518, + 0.6388, 0.2171, 0.2636, 0.4873, 0.6059, 0.1825, 0.2514, + 0.8353, 0.3177, 0.0671, 0.6724, 0.2137, 0.9634, 0.1659, + 0.8310, 0.1283, 0.7040, 0.7122, 0.7650, 0.2004, 0.0295, + 0.0888, 0.3593, 0.6433, 0.9173, 0.5749, 0.5966, 0.6545, + 0.8191, 0.5699, 0.1636, 0.7580, 0.4365, 0.7079, 0.9192, + 0.9281, 0.8754, 0.6240, 0.7560, 0.6668, 0.9111, 0.9369, + 0.0367, 0.7533, 0.4804, 0.3166, 0.9053, 0.4957, 0.5130, + 0.7835, 0.3616, 0.2789, 0.5579, 0.6716, 0.4682, 0.6876, + 0.0966, 0.9182, 0.5997, 0.8196, 0.3189, 0.4501, 0.8553, + 0.2473, 0.0680, 0.4275, 0.1537, 0.0899, 0.7313, 0.8241, + 0.3081, 0.6292, 0.1662, 0.0904, 0.5149, 0.6344, 0.2456, + 0.2729, 0.2756, 0.6668, 0.4375, 0.7295, 0.4340, 0.2948, + 0.0278, 0.7870, 0.4490, 0.1408, 0.9227, 0.4041, 0.8449, + 0.5107, 0.6642, 0.1322, 0.7169, 0.8679, 0.3739, 0.7236, + 0.4248, 0.5882, 0.9469, 0.8184, 0.8365, 0.8779, 0.1038, + 0.8810, 0.9769, 0.5428, 0.3538, 0.9830, 0.9676, 0.1831, + 0.0084, 0.5971, 0.7115, 0.1927, 0.2619, 0.1005, 0.5333, + 0.2042, 0.2705, 0.6258, 0.9835, 0.9394, 0.7617, 0.3233, + 0.0297, 0.9049, 0.3704, 0.8040, 0.8975, 0.1579, 0.5446, + 0.2183, 0.2325, 0.0639, 0.2768, 0.2461, 0.4765, 0.2089, + 0.9111, 0.6567, 0.5216, 0.9797, 0.3080, 0.5219, 0.5582, + 0.3741, 0.2265, 0.1644, 0.9904, 0.9096, 0.6977, 0.7378, + 0.6163, 0.2973, 0.5170, 0.1559, 0.9194, 0.1883, 0.3665, + 0.6107, 0.4618, 0.4740, 0.0187, 0.8554, 0.8371, 0.2227, + 0.9554, 0.1756, 0.1396, 0.9691, 0.1329, 0.2855, 0.2852, + 0.5955, 0.2284, 0.7196, 
0.1940, 0.7502, 0.5298, 0.9924, + 0.7439, 0.6637, 0.2900, 0.6522, 0.0204, 0.0933, 0.3725, + 0.2785, 0.1362, 0.2224, 0.2585, 0.4365, 0.6210, 0.9700, + 0.5368, 0.3375, 0.9444, 0.7297, 0.5092, 0.3376, 0.8591, + 0.9008, 0.5379, 0.9510, 0.9421, 0.0713, 0.2091, 0.6298, + 0.4005, 0.7776, 0.4989, 0.8706, 0.2922, 0.3070, 0.0813, + 0.5714, 0.2514, 0.4480, 0.5234, 0.0360, 0.0758, 0.3730, + 0.7634, 0.0856, 0.8934, 0.6490, 0.5291, 0.3184, 0.3037, + 0.3443, 0.5792, 0.5490, 0.5327, 0.4678, 0.2006, 0.8032, + 0.9436, 0.9189, 0.7953, 0.9640, 0.0058, 0.7983, 0.6143, + 0.9639, 0.7438, 0.7417, 0.1685, 0.5369, 0.7292, 0.5840, + 0.3088, 0.6745, 0.1144, 0.4342, 0.4148, 0.3081, 0.3141, + 0.7523, 0.9548, 0.1159, 0.9214, 0.7606, 0.1319, 0.0108, + 0.5213, 0.7386, 0.2722, 0.3665, 0.1771, 0.0556, 0.9859, + 0.0054, 0.2965, 0.3687, 0.8425, 0.4479, 0.3524, 0.2824, + 0.4727, 0.3410, 0.8501, 0.7125, 0.9448, 0.2352, 0.2937, + 0.6534, 0.3739, 0.6671, 0.8901, 0.3611, 0.4255, 0.2194, + 0.1776, 0.7356, 0.6315, 0.2672, 0.6863, 0.5095, 0.5719, + 0.6850, 0.4423, 0.4707, 0.8096, 0.8117, 0.4330, 0.1565, + 0.6528, 0.3855, 0.7960, 0.8457, 0.9509, 0.4271, 0.9874, + 0.2012, 0.1858, 0.4458, 0.2549, 0.0069, 0.9617, 0.8083, + 0.8286, 0.3440, 0.6245, 0.1685, 0.2887, 0.8351, 0.3371, + 0.8326, 0.2133, 0.4971, 0.1969, 0.9997, 0.0108, 0.8265, + 0.2603, 0.2486, 0.1123, 0.3933, 0.1972, 0.1286, 0.2410, + 0.9427, 0.8517, 0.1056, 0.4498, 0.3528, 0.4997, 0.1873, + 0.9308, 0.5270, 0.1818, 0.0192, 0.5147, 0.6995, 0.3808, + 0.1801, 0.1254, 0.8616, 0.9334, 0.6635, 0.6097, 0.9317, + 0.0639, 0.5688, 0.1321, 0.2406, 0.0702, 0.3025, 0.5470, + 0.2516, 0.7748, 0.5947, 0.3008, 0.0640, 0.6075, 0.7333, + 0.2489, 0.8087, 0.9366, 0.5301, 0.5997, 0.9710, 0.1167, + 0.8349, 0.0943, 0.3077, 0.1781, 0.0964, 0.5114, 0.2721, + 0.4485, 0.5358, 0.9249, 0.1281, 0.7787, 0.9696, 0.7641, + 0.9267, 0.8005, 0.2565, 0.9156, 0.0177, 0.4399, 0.6139, + 0.6561, 0.2921, 0.2455, 0.0677, 0.8007, 0.1673, 0.6272, + 0.9708, 0.7757, 0.7844, 0.4143, 0.7244, 0.3178, 0.2015, + 0.7092, 0.6947, 0.5693, 0.4803, 0.3248, 0.5740, 0.1234, + 0.7532, 0.8432, 0.5693, 0.7346, 0.8612, 0.6697, 0.0106, + 0.9867, 0.9456, 0.7600, 0.4214, 0.7451, 0.1923, 0.8683, + 0.7611, 0.2801, 0.7750, 0.6682, 0.9146, 0.0443, 0.5069, + 0.8481, 0.2446, 0.9950, 0.4152, 0.1745, 0.1637, 0.2428, + 0.6569, 0.4895, 0.4090, 0.9641, 0.2949, 0.4773, 0.4363, + 0.3083, 0.0374, 0.4019, 0.0579, 0.0686, 0.4139, 0.1952, + 0.0362, 0.0311, 0.8833, 0.6790, 0.8853, 0.7058, 0.2329, + 0.9566, 0.6724, 0.9699, 0.2549, 0.0950, 0.9403, 0.8375, + 0.7841, 0.9563, 0.7054, 0.1040, 0.8158, 0.8050, 0.1655, + 0.1587, 0.3559, 0.9451, 0.7518, 0.4895, 0.3845, 0.5434, + 0.2137, 0.1454, 0.9690, 0.7963, 0.0141, 0.6547, 0.3894, + 0.1222, 0.1580, 0.2599, 0.1493, 0.3838, 0.7658, 0.2010, + 0.8027, 0.8916, 0.5453, 0.4941, 0.7164, 0.8434, 0.4222, + 0.2960, 0.0732, 0.0275, 0.3155, 0.9582, 0.2006, 0.5100, + 0.3835, 0.7693, 0.0825, 0.5403, 0.0058, 0.5775, 0.4579, + 0.7231, 0.5432, 0.1385, 0.9942, 0.7281, 0.1494, 0.2026, + 0.0383, 0.2089, 0.1678, 0.3352, 0.3308, 0.9511, 0.9479, + 0.3486, 0.6349, 0.3617, 0.1790, 0.9290, 0.9003, 0.7128, + 0.2573, 0.5128, 0.2734, 0.8333, 0.7565, 0.2114, 0.0550, + 0.1806, 0.7785, 0.3657, 0.1751, 0.5105, 0.4138, 0.6648, + 0.0988, 0.5462, 0.5957, 0.0841, 0.7102, 0.7202, 0.1878, + 0.8772, 0.4902, 0.6123, 0.9058, 0.6380, 0.2081, 0.1591, + 0.1205, 0.1672, 0.9664, 0.6634, 0.6646, 0.2765, 0.4269, + 0.5829, 0.8144, 0.4256, 0.2309, 0.0615, 0.0926, 0.0573, + 0.0407, 0.6434, 0.2531, 0.0667, 0.6337, 0.0241, 0.7295, + 0.9133, 0.7637, 0.9403, 0.7968, 0.3787, 
0.2873, 0.0225, + 0.1808, 0.3069, 0.1014, 0.5506, 0.2895, 0.7186, 0.5054, + 0.4710, 0.7466, 0.0956, 0.6689, 0.1987, 0.9374, 0.4713, + 0.8415, 0.9657, 0.9038, 0.1088, 0.8688, 0.2488, 0.6505, + 0.9593, 0.4610, 0.7212, 0.1418, 0.6637, 0.4566, 0.9474, + 0.6935, 0.5967, 0.6983, 0.4944, 0.8522, 0.4429, 0.6141, + 0.6725, 0.8987, 0.3589, 0.1096, 0.3360, 0.7261]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.5928, 0.5923, 0.2769, ..., 0.7868, 0.2495, 0.0989]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.501307487487793 seconds + +[18.25, 18.14, 17.95, 17.87, 17.91, 17.96, 17.87, 17.59, 18.03, 17.97] +[50.86] +14.265194416046143 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 225815, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.501307487487793, 'TIME_S_1KI': 0.046504029792032386, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 725.5277880001069, 'W': 50.86} +[18.25, 18.14, 17.95, 17.87, 17.91, 17.96, 17.87, 17.59, 18.03, 17.97, 17.9, 17.6, 18.05, 17.8, 18.18, 17.66, 17.82, 21.98, 18.23, 17.62] +326.51 +16.325499999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 225815, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.501307487487793, 'TIME_S_1KI': 0.046504029792032386, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 725.5277880001069, 'W': 50.86, 'J_1KI': 3.2129300002218932, 'W_1KI': 0.22522861634523836, 'W_D': 34.5345, 'J_D': 492.64135656094555, 'W_D_1KI': 0.15293271040453468, 'J_D_1KI': 0.0006772477931250567} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json index d7d5020..f1d93dd 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 13.995913743972778, "TIME_S_1KI": 13.995913743972778, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 871.8022893977165, "W": 48.71, "J_1KI": 871.8022893977165, "W_1KI": 48.71, "W_D": 32.347, "J_D": 578.9404363610745, "W_D_1KI": 32.347, "J_D_1KI": 32.347} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 13.435759782791138, "TIME_S_1KI": 13.435759782791138, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 971.6288854122162, "W": 53.4, "J_1KI": 971.6288854122162, "W_1KI": 53.4, "W_D": 37.34824999999999, "J_D": 679.562519093573, "W_D_1KI": 37.34824999999999, "J_D_1KI": 37.34824999999999} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output index 530a6a0..35674e7 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '500000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 13.995913743972778} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 13.435759782791138} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 10, ..., 2499992, - 2499997, 2500000]), - col_indices=tensor([ 17718, 235055, 35243, ..., 14166, 348855, - 416543]), - values=tensor([0.0021, 0.9166, 0.2725, ..., 0.7498, 0.6792, 0.5299]), +tensor(crow_indices=tensor([ 0, 3, 8, ..., 2499995, + 2499996, 2500000]), + col_indices=tensor([ 61754, 291279, 469696, ..., 173785, 177543, + 423232]), + values=tensor([0.5269, 0.9088, 0.4901, ..., 0.3381, 0.9016, 0.0517]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0136, 0.8273, 0.9896, ..., 0.5941, 0.9828, 0.6210]) +tensor([0.7575, 0.8230, 0.6656, ..., 0.2327, 0.7437, 0.7040]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 13.995913743972778 seconds +Time: 13.435759782791138 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
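A note on the derived fields in these JSON records: they can be reproduced from the raw lines printed alongside them — the bare single-element list (e.g. [50.86]) is the average wall power during the measured run, the float after it is the run's elapsed wall time, and the two longer bracketed lists are idle-power samples taken around the run. Below is a minimal sketch of the apparent arithmetic; the helper name and the ~0.9 idle-scaling factor are inferences from the numbers in this diff, not confirmed source code.

def derive_metrics(record, idle_samples_w, run_power_w, elapsed_s):
    # record already holds ITERATIONS and TIME_S from the transcript.
    kilo_iters = record["ITERATIONS"] / 1000.0

    # Whole-run energy: wall power times elapsed wall time,
    # e.g. 50.86 W * 14.2652 s = 725.53 J in the 10000x10000 record above.
    record["W"] = run_power_w
    record["J"] = run_power_w * elapsed_s

    # Dynamic (idle-subtracted) power. The printed sums (e.g. 326.51 over
    # twenty ~18 W samples) are consistent with scaling the idle average by
    # roughly 0.9 before subtracting it -- an assumption, not a documented
    # formula.
    idle_w = 0.9 * sum(idle_samples_w) / len(idle_samples_w)
    record["W_D"] = run_power_w - idle_w
    record["J_D"] = record["W_D"] * elapsed_s

    # Per-1000-iteration normalizations.
    record["TIME_S_1KI"] = record["TIME_S"] / kilo_iters
    record["J_1KI"] = record["J"] / kilo_iters
    record["W_1KI"] = record["W"] / kilo_iters
    record["W_D_1KI"] = record["W_D"] / kilo_iters
    # The recorded J_D_1KI values match W_D_1KI / kilo_iters rather than
    # J_D / kilo_iters (e.g. 0.152933 / 225.815 = 0.000677 in the record
    # above), which looks like a double normalization in the generating
    # script; this reproduces the recorded values as-is.
    record["J_D_1KI"] = record["W_D_1KI"] / kilo_iters
    return record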
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 10, ..., 2499992, - 2499997, 2500000]), - col_indices=tensor([ 17718, 235055, 35243, ..., 14166, 348855, - 416543]), - values=tensor([0.0021, 0.9166, 0.2725, ..., 0.7498, 0.6792, 0.5299]), +tensor(crow_indices=tensor([ 0, 3, 8, ..., 2499995, + 2499996, 2500000]), + col_indices=tensor([ 61754, 291279, 469696, ..., 173785, 177543, + 423232]), + values=tensor([0.5269, 0.9088, 0.4901, ..., 0.3381, 0.9016, 0.0517]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0136, 0.8273, 0.9896, ..., 0.5941, 0.9828, 0.6210]) +tensor([0.7575, 0.8230, 0.6656, ..., 0.2327, 0.7437, 0.7040]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +35,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 13.995913743972778 seconds +Time: 13.435759782791138 seconds -[18.5, 17.83, 21.98, 17.84, 18.21, 18.0, 17.97, 17.87, 17.99, 17.99] -[48.71] -17.897809267044067 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 13.995913743972778, 'TIME_S_1KI': 13.995913743972778, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 871.8022893977165, 'W': 48.71} -[18.5, 17.83, 21.98, 17.84, 18.21, 18.0, 17.97, 17.87, 17.99, 17.99, 18.27, 17.72, 18.53, 17.6, 18.07, 17.75, 17.78, 17.88, 17.93, 17.86] -327.26 -16.363 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 13.995913743972778, 'TIME_S_1KI': 13.995913743972778, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 871.8022893977165, 'W': 48.71, 'J_1KI': 871.8022893977165, 'W_1KI': 48.71, 'W_D': 32.347, 'J_D': 578.9404363610745, 'W_D_1KI': 32.347, 'J_D_1KI': 32.347} +[18.26, 17.5, 18.12, 17.51, 17.77, 17.68, 17.73, 17.56, 17.86, 17.52] +[53.4] +18.195297479629517 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 13.435759782791138, 'TIME_S_1KI': 13.435759782791138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 971.6288854122162, 'W': 53.4} +[18.26, 17.5, 18.12, 17.51, 17.77, 17.68, 17.73, 17.56, 17.86, 17.52, 18.07, 17.37, 18.42, 19.11, 17.73, 17.65, 18.23, 17.55, 17.59, 17.46] +321.035 +16.051750000000002 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 13.435759782791138, 'TIME_S_1KI': 13.435759782791138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 971.6288854122162, 'W': 53.4, 'J_1KI': 971.6288854122162, 'W_1KI': 53.4, 'W_D': 37.34824999999999, 'J_D': 679.562519093573, 'W_D_1KI': 37.34824999999999, 'J_D_1KI': 37.34824999999999} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json index d6c035a..af9c51f 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 8984, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.250720024108887, "TIME_S_1KI": 1.1409973312676855, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 667.435348098278, "W": 47.13, "J_1KI": 74.29155700114404, "W_1KI": 5.245992876224399, "W_D": 30.525750000000002, "J_D": 432.29290424805885, "W_D_1KI": 3.3977905164737314, "J_D_1KI": 0.37820464341871457} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 9021, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.444438695907593, "TIME_S_1KI": 1.1577916745269474, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 735.9580717468261, "W": 51.98, "J_1KI": 81.58275931125442, "W_1KI": 5.762110630750471, "W_D": 35.674749999999996, "J_D": 505.1004274730682, "W_D_1KI": 3.9546336326349625, "J_D_1KI": 0.43838084831337576} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output index eb4a2d8..945b881 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.1686315536499023} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.1638367176055908} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 12, ..., 249984, 249988, +tensor(crow_indices=tensor([ 0, 5, 10, ..., 249989, 249992, 250000]), - col_indices=tensor([ 9222, 11801, 17371, ..., 41613, 43396, 49641]), - values=tensor([0.5050, 0.7653, 0.0671, ..., 0.1421, 0.6855, 0.0275]), + col_indices=tensor([ 5085, 27218, 28258, ..., 33170, 33475, 34242]), + values=tensor([0.4699, 0.9594, 0.0965, ..., 0.7443, 0.7286, 0.0273]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2330, 0.0304, 0.5518, ..., 0.1557, 0.6263, 0.0730]) +tensor([0.3938, 0.4910, 0.8553, ..., 0.5913, 0.5925, 0.7936]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 1.1686315536499023 seconds +Time: 1.1638367176055908 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8984', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.250720024108887} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9021', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.444438695907593} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 7, ..., 249994, 249997, +tensor(crow_indices=tensor([ 0, 5, 13, ..., 249988, 249994, 250000]), - col_indices=tensor([ 1146, 2450, 11327, ..., 241, 2629, 25085]), - values=tensor([0.2696, 0.3732, 0.9366, ..., 0.5943, 0.0784, 0.3144]), + col_indices=tensor([ 3969, 14280, 16197, ..., 14337, 15782, 32993]), + values=tensor([0.2139, 0.2141, 0.1060, ..., 0.9818, 0.6790, 0.2416]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7229, 0.2746, 0.7643, ..., 0.7812, 0.8470, 0.7243]) +tensor([0.8858, 0.9490, 0.2990, ..., 0.1473, 0.1815, 0.8776]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.250720024108887 seconds +Time: 10.444438695907593 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 7, ..., 249994, 249997, +tensor(crow_indices=tensor([ 0, 5, 13, ..., 249988, 249994, 250000]), - col_indices=tensor([ 1146, 2450, 11327, ..., 241, 2629, 25085]), - values=tensor([0.2696, 0.3732, 0.9366, ..., 0.5943, 0.0784, 0.3144]), + col_indices=tensor([ 3969, 14280, 16197, ..., 14337, 15782, 32993]), + values=tensor([0.2139, 0.2141, 0.1060, ..., 0.9818, 0.6790, 0.2416]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7229, 0.2746, 0.7643, ..., 0.7812, 0.8470, 0.7243]) +tensor([0.8858, 0.9490, 0.2990, ..., 0.1473, 0.1815, 0.8776]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.250720024108887 seconds +Time: 10.444438695907593 seconds -[18.69, 18.1, 18.11, 18.0, 21.83, 17.95, 18.22, 18.09, 18.15, 18.1] -[47.13] -14.161581754684448 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8984, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.250720024108887, 'TIME_S_1KI': 1.1409973312676855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 667.435348098278, 'W': 47.13} -[18.69, 18.1, 18.11, 18.0, 21.83, 17.95, 18.22, 18.09, 18.15, 18.1, 21.63, 17.85, 18.41, 18.24, 17.96, 18.09, 18.1, 17.94, 18.92, 17.83] -332.08500000000004 -16.60425 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8984, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.250720024108887, 'TIME_S_1KI': 1.1409973312676855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 667.435348098278, 'W': 47.13, 'J_1KI': 74.29155700114404, 'W_1KI': 5.245992876224399, 'W_D': 30.525750000000002, 'J_D': 432.29290424805885, 'W_D_1KI': 3.3977905164737314, 'J_D_1KI': 0.37820464341871457} +[18.28, 18.06, 17.98, 18.51, 17.98, 18.02, 19.12, 19.17, 17.99, 18.0] +[51.98] +14.158485412597656 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9021, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.444438695907593, 'TIME_S_1KI': 1.1577916745269474, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 735.9580717468261, 'W': 51.98} +[18.28, 18.06, 17.98, 18.51, 17.98, 18.02, 19.12, 19.17, 17.99, 18.0, 18.49, 17.73, 18.15, 17.89, 18.02, 17.91, 17.85, 17.6, 17.85, 17.78] +326.105 +16.30525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9021, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.444438695907593, 'TIME_S_1KI': 1.1577916745269474, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 735.9580717468261, 'W': 51.98, 'J_1KI': 81.58275931125442, 'W_1KI': 5.762110630750471, 'W_D': 35.674749999999996, 'J_D': 505.1004274730682, 'W_D_1KI': 3.9546336326349625, 'J_D_1KI': 0.43838084831337576} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json index 6747ace..0738cf0 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1969, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.35115671157837, "TIME_S_1KI": 5.25706282964874, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 727.7024494171142, "W": 48.48, "J_1KI": 369.5797102169193, "W_1KI": 24.62163534789233, "W_D": 32.2385, "J_D": 483.9116216075421, "W_D_1KI": 16.373031995937023, "J_D_1KI": 8.315404771933482} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1973, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.479424238204956, "TIME_S_1KI": 5.311416238319795, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 791.6620005321503, "W": 53.24, "J_1KI": 401.2478461896352, "W_1KI": 26.984287886467307, "W_D": 37.20700000000001, "J_D": 553.2563496205807, "W_D_1KI": 18.858084135833757, "J_D_1KI": 9.558076095202107} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output index edef138..3bd1995 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.33142876625061} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.321045160293579} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 50, 102, ..., 2499894, - 2499942, 2500000]), - col_indices=tensor([ 362, 476, 734, ..., 42817, 42901, 48624]), - values=tensor([0.1861, 0.1141, 0.9529, ..., 0.0521, 0.7769, 0.3485]), +tensor(crow_indices=tensor([ 0, 54, 97, ..., 2499896, + 2499948, 2500000]), + col_indices=tensor([ 176, 180, 853, ..., 47415, 47956, 49304]), + values=tensor([0.4358, 0.1204, 0.8362, ..., 0.7793, 0.3332, 0.4077]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6800, 0.1652, 0.7606, ..., 0.1973, 0.6571, 0.7552]) +tensor([0.9660, 0.1174, 0.2174, ..., 0.0235, 0.8944, 0.4447]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 5.33142876625061 seconds +Time: 5.321045160293579 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1969', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.35115671157837} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1973', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.479424238204956} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 59, 106, ..., 2499882, - 2499933, 2500000]), - col_indices=tensor([ 752, 1386, 1561, ..., 49182, 49404, 49846]), - values=tensor([0.0219, 0.4602, 0.8212, ..., 0.9720, 0.3228, 0.9373]), +tensor(crow_indices=tensor([ 0, 43, 100, ..., 2499912, + 2499964, 2500000]), + col_indices=tensor([ 471, 539, 1515, ..., 46324, 49367, 49678]), + values=tensor([0.0688, 0.1954, 0.6278, ..., 0.4403, 0.6708, 0.8543]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3378, 0.8054, 0.7422, ..., 0.6857, 0.1927, 0.4134]) +tensor([0.7713, 0.8001, 0.0882, ..., 0.6644, 0.4702, 0.2491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.35115671157837 seconds +Time: 10.479424238204956 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 59, 106, ..., 2499882, - 2499933, 2500000]), - col_indices=tensor([ 752, 1386, 1561, ..., 49182, 49404, 49846]), - values=tensor([0.0219, 0.4602, 0.8212, ..., 0.9720, 0.3228, 0.9373]), +tensor(crow_indices=tensor([ 0, 43, 100, ..., 2499912, + 2499964, 2500000]), + col_indices=tensor([ 471, 539, 1515, ..., 46324, 49367, 49678]), + values=tensor([0.0688, 0.1954, 0.6278, ..., 0.4403, 0.6708, 0.8543]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3378, 0.8054, 0.7422, ..., 0.6857, 0.1927, 0.4134]) +tensor([0.7713, 0.8001, 0.0882, ..., 0.6644, 0.4702, 0.2491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.35115671157837 seconds +Time: 10.479424238204956 seconds -[18.43, 18.02, 18.06, 17.85, 18.04, 18.18, 18.02, 17.8, 18.02, 18.51] -[48.48] -15.010364055633545 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1969, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.35115671157837, 'TIME_S_1KI': 5.25706282964874, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.7024494171142, 'W': 48.48} -[18.43, 18.02, 18.06, 17.85, 18.04, 18.18, 18.02, 17.8, 18.02, 18.51, 18.08, 17.97, 18.18, 18.15, 17.97, 18.08, 18.16, 17.86, 18.05, 17.82] -324.83 -16.2415 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1969, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.35115671157837, 'TIME_S_1KI': 5.25706282964874, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.7024494171142, 'W': 48.48, 'J_1KI': 369.5797102169193, 'W_1KI': 24.62163534789233, 'W_D': 32.2385, 'J_D': 483.9116216075421, 'W_D_1KI': 16.373031995937023, 'J_D_1KI': 8.315404771933482} +[18.09, 17.68, 17.78, 17.6, 17.88, 17.97, 17.49, 17.67, 17.75, 17.8] +[53.24] +14.86968445777893 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1973, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.479424238204956, 'TIME_S_1KI': 5.311416238319795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 791.6620005321503, 'W': 53.24} +[18.09, 17.68, 17.78, 17.6, 17.88, 17.97, 17.49, 17.67, 17.75, 17.8, 18.63, 17.66, 18.2, 17.66, 17.95, 17.62, 18.05, 17.65, 17.58, 18.42] +320.65999999999997 +16.032999999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1973, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.479424238204956, 'TIME_S_1KI': 5.311416238319795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 791.6620005321503, 'W': 53.24, 'J_1KI': 401.2478461896352, 'W_1KI': 26.984287886467307, 'W_D': 37.20700000000001, 'J_D': 553.2563496205807, 'W_D_1KI': 18.858084135833757, 'J_D_1KI': 9.558076095202107} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json index 91b7ae2..0990f32 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 21352, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.102294206619263, "TIME_S_1KI": 0.47313105126542065, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 644.554582464695, "W": 46.45, "J_1KI": 30.187082355971103, "W_1KI": 2.1754402397901837, "W_D": 30.18325, "J_D": 418.83212273794413, "W_D_1KI": 1.4136029411764706, "J_D_1KI": 0.066204708747493} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 21464, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.162778615951538, "TIME_S_1KI": 0.47348018151097365, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 708.2274965262413, "W": 50.99, "J_1KI": 32.99606301370859, "W_1KI": 2.3756056653000375, "W_D": 25.345, "J_D": 352.0303176987171, "W_D_1KI": 1.1808143868803578, "J_D_1KI": 0.055013715378324536} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output index c1ed6fd..eca4709 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.49173688888549805} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.4891834259033203} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([ 851, 39596, 1204, ..., 6262, 34652, 46359]), - values=tensor([0.1009, 0.2308, 0.6894, ..., 0.4766, 0.7010, 0.2687]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([16409, 39665, 45486, ..., 40216, 44015, 30698]), + values=tensor([0.3828, 0.2137, 0.3194, ..., 0.5609, 0.6557, 0.9594]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.2088, 0.1405, 0.6063, ..., 0.1063, 0.3954, 0.8044]) +tensor([0.1367, 0.4150, 0.8251, ..., 0.6451, 0.2178, 0.9645]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.49173688888549805 seconds +Time: 0.4891834259033203 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '21352', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.102294206619263} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '21464', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.162778615951538} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24998, 25000]), - col_indices=tensor([15528, 30130, 16433, ..., 30917, 35420, 44166]), - values=tensor([0.6196, 0.0183, 0.2015, ..., 0.9265, 0.2661, 0.3216]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24997, 24999, 25000]), + col_indices=tensor([27591, 28713, 10997, ..., 3373, 26495, 43984]), + values=tensor([0.0595, 0.2219, 0.9508, ..., 0.7420, 0.6896, 0.8252]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3139, 0.2113, 0.1225, ..., 0.3436, 0.4255, 0.1892]) +tensor([0.4890, 0.2230, 0.0247, ..., 0.5863, 0.9029, 0.3113]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,15 +34,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.102294206619263 seconds +Time: 10.162778615951538 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24998, 25000]), - col_indices=tensor([15528, 30130, 16433, ..., 30917, 35420, 44166]), - values=tensor([0.6196, 0.0183, 0.2015, ..., 0.9265, 0.2661, 0.3216]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24997, 24999, 25000]), + col_indices=tensor([27591, 28713, 10997, ..., 3373, 26495, 43984]), + values=tensor([0.0595, 0.2219, 0.9508, ..., 0.7420, 0.6896, 0.8252]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3139, 0.2113, 0.1225, ..., 0.3436, 0.4255, 0.1892]) +tensor([0.4890, 0.2230, 0.0247, ..., 0.5863, 0.9029, 0.3113]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -50,13 +50,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.102294206619263 seconds +Time: 10.162778615951538 seconds -[18.39, 18.05, 18.35, 17.95, 18.07, 17.91, 18.01, 17.84, 17.86, 18.12] -[46.45] -13.876309633255005 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21352, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.102294206619263, 'TIME_S_1KI': 0.47313105126542065, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 644.554582464695, 'W': 46.45} -[18.39, 18.05, 18.35, 17.95, 18.07, 17.91, 18.01, 17.84, 17.86, 18.12, 18.35, 17.9, 17.93, 17.87, 17.99, 17.82, 18.04, 18.01, 19.31, 17.99] -325.33500000000004 -16.266750000000002 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21352, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.102294206619263, 'TIME_S_1KI': 0.47313105126542065, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 644.554582464695, 'W': 46.45, 'J_1KI': 30.187082355971103, 'W_1KI': 2.1754402397901837, 'W_D': 30.18325, 'J_D': 418.83212273794413, 'W_D_1KI': 1.4136029411764706, 'J_D_1KI': 0.066204708747493} +[22.1, 18.96, 18.21, 17.89, 17.76, 17.62, 17.96, 18.44, 17.92, 17.8] +[50.99] +13.88953709602356 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21464, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.162778615951538, 'TIME_S_1KI': 0.47348018151097365, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 708.2274965262413, 'W': 50.99} +[22.1, 18.96, 18.21, 17.89, 17.76, 17.62, 17.96, 18.44, 17.92, 17.8, 27.02, 48.47, 52.31, 52.11, 52.93, 45.58, 32.74, 23.06, 18.26, 18.44] +512.9000000000001 +25.645000000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21464, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.162778615951538, 'TIME_S_1KI': 0.47348018151097365, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 708.2274965262413, 'W': 50.99, 'J_1KI': 32.99606301370859, 'W_1KI': 2.3756056653000375, 'W_D': 25.345, 'J_D': 352.0303176987171, 'W_D_1KI': 1.1808143868803578, 'J_D_1KI': 0.055013715378324536} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..cdd4446 --- /dev/null +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 220548, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.518115997314453, "TIME_S_1KI": 0.04769082466091034, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 712.8332035136224, "W": 50.77000000000001, "J_1KI": 3.2321000576456025, "W_1KI": 0.23019932168960958, "W_D": 34.49475000000001, "J_D": 484.32151165848984, "W_D_1KI": 0.15640472822242782, "J_D_1KI": 0.0007091641194770654} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..0444e69 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.06373429298400879} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2496, 2500, 2500]), + col_indices=tensor([ 225, 423, 3600, ..., 1030, 3468, 3660]), + values=tensor([0.7007, 0.4494, 0.9248, ..., 0.2922, 0.0433, 0.9500]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8445, 0.6906, 0.6660, ..., 0.8648, 0.6232, 0.6893]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.06373429298400879 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '164746', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.8433122634887695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([3043, 3415, 2314, ..., 4144, 83, 2442]), + values=tensor([0.9885, 0.5870, 0.9255, ..., 0.0554, 0.8705, 0.0319]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7612, 0.3828, 0.3624, ..., 0.7209, 0.0836, 0.1248]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 7.8433122634887695 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '220548', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.518115997314453} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 4, ..., 2499, 2500, 2500]), + col_indices=tensor([1110, 1648, 1178, ..., 3403, 882, 3863]), + values=tensor([0.7053, 0.9818, 0.3657, ..., 0.7070, 0.0906, 0.0064]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7696, 0.8663, 0.2054, ..., 0.2110, 0.6343, 0.9754]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.518115997314453 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 4, ..., 2499, 2500, 2500]), + col_indices=tensor([1110, 1648, 1178, ..., 3403, 882, 3863]), + values=tensor([0.7053, 0.9818, 0.3657, ..., 0.7070, 0.0906, 0.0064]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7696, 0.8663, 0.2054, ..., 0.2110, 0.6343, 0.9754]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.518115997314453 seconds + +[18.38, 17.72, 18.11, 17.9, 17.72, 18.84, 18.26, 17.75, 18.1, 19.21] +[50.77] +14.040441274642944 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.518115997314453, 'TIME_S_1KI': 0.04769082466091034, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.8332035136224, 'W': 50.77000000000001} +[18.38, 17.72, 18.11, 17.9, 17.72, 18.84, 18.26, 17.75, 18.1, 19.21, 18.75, 19.27, 17.62, 17.61, 18.13, 17.64, 17.82, 17.78, 18.21, 17.71] +325.505 +16.27525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.518115997314453, 'TIME_S_1KI': 0.04769082466091034, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.8332035136224, 'W': 50.77000000000001, 'J_1KI': 3.2321000576456025, 'W_1KI': 0.23019932168960958, 'W_D': 34.49475000000001, 'J_D': 484.32151165848984, 'W_D_1KI': 0.15640472822242782, 'J_D_1KI': 0.0007091641194770654} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..8603573 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 110820, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.466707706451416, "TIME_S_1KI": 0.0944478226534147, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 732.0924870371819, "W": 51.05, "J_1KI": 6.606140471369625, "W_1KI": 0.46065692113336937, "W_D": 34.78399999999999, "J_D": 498.82673984527577, "W_D_1KI": 0.3138783613066233, "J_D_1KI": 0.0028323259457374416} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..d9f1f9f --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.10903120040893555} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta 
state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 12, ..., 24993, 24997, 25000]), + col_indices=tensor([ 238, 1233, 1853, ..., 2176, 2430, 4262]), + values=tensor([0.6643, 0.7436, 0.3106, ..., 0.6873, 0.4400, 0.9022]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1554, 0.8998, 0.5501, ..., 0.9645, 0.8024, 0.0587]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.10903120040893555 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '96302', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.124423503875732} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 5, ..., 24990, 24996, 25000]), + col_indices=tensor([ 172, 514, 1428, ..., 3067, 4065, 4821]), + values=tensor([0.3942, 0.3525, 0.9893, ..., 0.1091, 0.2236, 0.5194]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3072, 0.9146, 0.5714, ..., 0.0055, 0.2166, 0.7033]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.124423503875732 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '110820', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.466707706451416} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24990, 24995, 25000]), + col_indices=tensor([ 254, 2428, 3765, ..., 2763, 3021, 4452]), + values=tensor([0.4991, 0.2229, 0.1709, ..., 0.9765, 0.1191, 0.1560]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0044, 0.7640, 0.5767, ..., 0.5512, 0.1474, 0.2527]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.466707706451416 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24990, 24995, 25000]), + col_indices=tensor([ 254, 2428, 3765, ..., 2763, 3021, 4452]), + values=tensor([0.4991, 0.2229, 0.1709, ..., 0.9765, 0.1191, 0.1560]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0044, 0.7640, 0.5767, ..., 0.5512, 0.1474, 0.2527]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.466707706451416 seconds + +[18.71, 18.12, 17.8, 18.17, 17.79, 18.66, 18.24, 17.82, 17.96, 18.33] +[51.05] +14.340695142745972 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 110820, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.466707706451416, 'TIME_S_1KI': 0.0944478226534147, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 732.0924870371819, 'W': 51.05} +[18.71, 18.12, 17.8, 18.17, 17.79, 18.66, 18.24, 17.82, 17.96, 18.33, 18.24, 17.71, 17.88, 18.05, 17.89, 18.02, 18.2, 18.01, 17.96, 18.8] +325.32000000000005 +16.266000000000002 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 110820, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.466707706451416, 'TIME_S_1KI': 0.0944478226534147, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 732.0924870371819, 'W': 51.05, 'J_1KI': 6.606140471369625, 'W_1KI': 0.46065692113336937, 'W_D': 34.78399999999999, 'J_D': 498.82673984527577, 'W_D_1KI': 0.3138783613066233, 'J_D_1KI': 0.0028323259457374416} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..480b88c --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 20672, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.150692462921143, "TIME_S_1KI": 0.4910358196072534, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 730.3927840781212, "W": 52.39, "J_1KI": 35.332468270032955, "W_1KI": 2.534345975232198, "W_D": 36.085, "J_D": 503.07737380146983, "W_D_1KI": 1.7455979102167183, "J_D_1KI": 0.08444262336574683} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..a5c980d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, 
"MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.5079245567321777} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 49, 95, ..., 249890, 249948, + 250000]), + col_indices=tensor([ 55, 65, 142, ..., 4926, 4940, 4998]), + values=tensor([0.9119, 0.0018, 0.8572, ..., 0.6690, 0.1772, 0.9395]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5150, 0.8940, 0.4191, ..., 0.2946, 0.8617, 0.5629]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.5079245567321777 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '20672', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.150692462921143} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 43, 97, ..., 249889, 249944, + 250000]), + col_indices=tensor([ 6, 85, 316, ..., 4939, 4964, 4997]), + values=tensor([0.9982, 0.1843, 0.4498, ..., 0.0146, 0.5221, 0.3769]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3609, 0.3004, 0.4171, ..., 0.6127, 0.3616, 0.7085]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.150692462921143 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 43, 97, ..., 249889, 249944, + 250000]), + col_indices=tensor([ 6, 85, 316, ..., 4939, 4964, 4997]), + values=tensor([0.9982, 0.1843, 0.4498, ..., 0.0146, 0.5221, 0.3769]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3609, 0.3004, 0.4171, ..., 0.6127, 0.3616, 0.7085]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.150692462921143 seconds + +[18.49, 17.73, 17.92, 17.82, 17.93, 17.75, 17.92, 17.66, 18.2, 17.78] +[52.39] +13.9414541721344 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 20672, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.150692462921143, 'TIME_S_1KI': 0.4910358196072534, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 730.3927840781212, 'W': 52.39} +[18.49, 17.73, 17.92, 17.82, 17.93, 17.75, 17.92, 17.66, 18.2, 17.78, 18.3, 17.44, 17.89, 17.79, 17.89, 17.6, 18.39, 22.34, 17.77, 17.55] +326.1 +16.305 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 20672, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.150692462921143, 'TIME_S_1KI': 0.4910358196072534, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 730.3927840781212, 'W': 52.39, 'J_1KI': 35.332468270032955, 'W_1KI': 2.534345975232198, 'W_D': 36.085, 'J_D': 503.07737380146983, 'W_D_1KI': 1.7455979102167183, 'J_D_1KI': 0.08444262336574683} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..e52e6e6 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4507, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.511178016662598, "TIME_S_1KI": 2.332189486723452, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 757.7805129575729, "W": 51.93, "J_1KI": 168.13412756990746, "W_1KI": 11.522076769469715, "W_D": 35.70625, "J_D": 521.0379441708326, "W_D_1KI": 7.922398491235855, "J_D_1KI": 1.7577986446052485} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..a885cd8 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.3295679092407227} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 254, 504, ..., 1249528, + 1249756, 1250000]), + col_indices=tensor([ 6, 36, 59, ..., 4952, 4989, 4991]), + values=tensor([0.0659, 0.7749, 0.0668, ..., 0.7589, 0.1810, 0.5312]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.3067, 0.0072, 0.7740, ..., 0.2122, 0.3107, 0.3197]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 2.3295679092407227 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4507', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.511178016662598} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 219, 483, ..., 1249530, + 1249766, 1250000]), + col_indices=tensor([ 20, 32, 102, ..., 4974, 4977, 4994]), + values=tensor([0.6920, 0.8171, 0.6223, ..., 0.4625, 0.9983, 0.7249]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2561, 0.5591, 0.5400, ..., 0.8818, 0.4529, 0.6860]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.511178016662598 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 219, 483, ..., 1249530, + 1249766, 1250000]), + col_indices=tensor([ 20, 32, 102, ..., 4974, 4977, 4994]), + values=tensor([0.6920, 0.8171, 0.6223, ..., 0.4625, 0.9983, 0.7249]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2561, 0.5591, 0.5400, ..., 0.8818, 0.4529, 0.6860]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.511178016662598 seconds + +[18.39, 17.88, 18.12, 17.92, 17.75, 18.25, 17.78, 17.64, 17.79, 18.46] +[51.93] +14.592345714569092 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.511178016662598, 'TIME_S_1KI': 2.332189486723452, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 757.7805129575729, 'W': 51.93} +[18.39, 17.88, 18.12, 17.92, 17.75, 18.25, 17.78, 17.64, 17.79, 18.46, 18.69, 17.8, 17.97, 17.87, 17.86, 17.58, 17.99, 19.45, 18.2, 17.71] +324.47499999999997 +16.22375 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.511178016662598, 'TIME_S_1KI': 2.332189486723452, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 757.7805129575729, 'W': 51.93, 'J_1KI': 168.13412756990746, 'W_1KI': 11.522076769469715, 'W_D': 35.70625, 'J_D': 521.0379441708326, 'W_D_1KI': 7.922398491235855, 'J_D_1KI': 1.7577986446052485} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..635a552 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2058, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.45784878730774, "TIME_S_1KI": 5.08155917750619, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 785.1421909189224, "W": 52.34, "J_1KI": 381.5073813988933, "W_1KI": 25.432458697764822, "W_D": 35.778000000000006, "J_D": 536.6988404030801, "W_D_1KI": 17.384839650145775, "J_D_1KI": 8.44744395050815} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..4a3b3d8 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.10130500793457} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 521, 995, ..., 2499020, + 2499527, 2500000]), + col_indices=tensor([ 19, 49, 51, ..., 4986, 4987, 4995]), + values=tensor([0.7936, 0.5375, 0.7301, ..., 0.7605, 0.2307, 0.9856]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5487, 0.7747, 0.8035, ..., 0.5625, 0.3730, 0.5706]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 5.10130500793457 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2058', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.45784878730774} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 509, 1017, ..., 2498984, + 2499497, 2500000]), + col_indices=tensor([ 6, 22, 24, ..., 4979, 4998, 4999]), + values=tensor([0.4917, 0.1142, 0.9293, ..., 0.2344, 0.9124, 0.9917]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7101, 0.7759, 0.4138, ..., 0.4795, 0.2601, 0.9117]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.45784878730774 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 509, 1017, ..., 2498984, + 2499497, 2500000]), + col_indices=tensor([ 6, 22, 24, ..., 4979, 4998, 4999]), + values=tensor([0.4917, 0.1142, 0.9293, ..., 0.2344, 0.9124, 0.9917]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7101, 0.7759, 0.4138, ..., 0.4795, 0.2601, 0.9117]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.45784878730774 seconds + +[17.93, 17.71, 18.06, 17.99, 17.73, 17.68, 22.42, 18.22, 17.93, 17.72] +[52.34] +15.000806093215942 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2058, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.45784878730774, 'TIME_S_1KI': 5.08155917750619, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 785.1421909189224, 'W': 52.34} +[17.93, 17.71, 18.06, 17.99, 17.73, 17.68, 22.42, 18.22, 17.93, 17.72, 17.97, 22.23, 18.27, 18.03, 17.94, 17.65, 17.66, 17.82, 18.34, 17.5] +331.24 +16.562 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2058, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.45784878730774, 'TIME_S_1KI': 5.08155917750619, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 785.1421909189224, 'W': 52.34, 'J_1KI': 381.5073813988933, 'W_1KI': 25.432458697764822, 'W_D': 35.778000000000006, 'J_D': 536.6988404030801, 'W_D_1KI': 17.384839650145775, 'J_D_1KI': 8.44744395050815} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..6c5cd21 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 359075, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.526627540588379, "TIME_S_1KI": 0.029315957782046587, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 727.3910086989404, "W": 50.77, "J_1KI": 2.0257355947892233, "W_1KI": 0.14139107428810138, "W_D": 34.40475000000001, "J_D": 492.92310038477194, "W_D_1KI": 0.09581494116827963, "J_D_1KI": 0.00026683824039066943} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..f806f78 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.11398792266845703} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 241, 1973, 126, 4921, 4422, 2653, 3082, 2201, 909, + 773, 2476, 1101, 4124, 1149, 4932, 4150, 708, 3916, + 3901, 3756, 2285, 2145, 2412, 4449, 1421, 1959, 273, + 295, 438, 3557, 1406, 2159, 1555, 1352, 2308, 123, + 422, 816, 1668, 2887, 824, 2337, 308, 3497, 990, + 532, 4077, 543, 4572, 3537, 2814, 363, 2178, 459, + 194, 3590, 3027, 4470, 4045, 3521, 3600, 3448, 3378, + 3735, 2740, 4248, 3124, 1351, 4670, 655, 21, 1574, + 1992, 4925, 3906, 2630, 1378, 3476, 2249, 1157, 791, + 4242, 829, 3492, 751, 3125, 155, 1550, 3503, 3772, + 4314, 2771, 3009, 651, 454, 3292, 4403, 3040, 3507, + 2608, 4119, 1826, 4717, 3363, 464, 3190, 2566, 1334, + 3602, 3134, 4282, 4686, 3398, 415, 1914, 1278, 3697, + 3496, 579, 3955, 1068, 4099, 763, 3707, 3389, 1217, + 1044, 4869, 1375, 3824, 2384, 1580, 1119, 2286, 4182, + 194, 4854, 2427, 3130, 4857, 3962, 2164, 2297, 3429, + 4738, 1374, 1526, 1469, 698, 2341, 4993, 1945, 4526, + 2645, 2777, 3401, 889, 4389, 444, 1509, 4747, 2279, + 4668, 72, 129, 1221, 3493, 378, 595, 67, 1157, + 1657, 2497, 2001, 1078, 4882, 3030, 2378, 193, 2365, + 3970, 4956, 3547, 158, 4478, 3594, 3986, 4843, 4633, + 1401, 3655, 934, 1838, 4467, 1935, 2294, 329, 1885, + 2444, 2560, 3870, 4475, 843, 2939, 3686, 4333, 3066, + 1183, 367, 3706, 3954, 4842, 1757, 4835, 4167, 4982, + 1096, 3863, 1904, 2261, 4656, 4688, 3811, 4079, 2898, + 525, 3689, 59, 2698, 369, 2440, 1363, 4533, 2450, + 3223, 1033, 4049, 3368, 2542, 4831, 3226, 3742, 4496, + 434, 1015, 2564, 1295, 3848, 4039, 804]), + values=tensor([0.2184, 0.5485, 0.5631, 0.7186, 0.3971, 0.9050, 0.7143, + 0.7288, 0.3895, 0.9734, 0.7253, 0.3854, 0.7553, 0.4272, + 0.9870, 0.8470, 0.2594, 0.4864, 0.4236, 0.8391, 0.1976, + 0.0203, 0.1892, 0.3198, 0.2335, 0.4485, 0.4766, 0.2460, + 0.8756, 0.2717, 0.6013, 0.3920, 0.2318, 0.2314, 0.6325, + 0.7402, 0.4011, 0.6801, 0.0374, 0.5386, 0.8760, 0.4919, + 0.9099, 0.6426, 0.0752, 0.2458, 0.7495, 0.4949, 0.4717, + 0.8587, 0.9263, 0.5756, 0.1987, 0.1048, 0.8736, 0.4765, + 0.2414, 0.4379, 0.9381, 0.5720, 0.7831, 0.1225, 0.0871, + 0.1953, 0.0019, 0.7763, 0.7548, 0.3103, 0.4088, 0.9386, + 0.6409, 0.3915, 0.4398, 0.8886, 0.6326, 0.8708, 0.6836, + 0.2686, 0.0291, 0.4089, 0.8430, 0.7311, 0.2220, 0.0973, + 0.4335, 0.3659, 0.1254, 0.1858, 0.2947, 0.6441, 0.6573, + 0.8939, 0.8485, 0.7258, 0.8542, 0.3356, 0.6753, 0.2728, + 0.1795, 0.8246, 0.2224, 0.2674, 0.8957, 0.1897, 0.5785, + 0.0612, 0.0570, 0.6450, 0.0772, 0.5313, 0.3238, 0.7938, + 0.9961, 0.4101, 0.7007, 0.3996, 0.0865, 0.3609, 0.3202, + 0.4978, 0.4886, 0.2294, 0.1102, 0.5506, 0.2172, 0.1849, + 0.3574, 0.0197, 0.0592, 0.3653, 0.9739, 0.5626, 0.3629, + 0.5946, 0.5286, 0.9497, 0.4607, 0.1036, 0.7227, 0.1313, + 0.2695, 0.1429, 0.5049, 0.5045, 0.0131, 0.8291, 0.1488, + 0.2606, 0.8600, 0.2356, 0.5905, 0.8817, 0.3417, 0.2576, + 0.1052, 0.2996, 0.2243, 0.4829, 0.2637, 0.4923, 0.6774, + 0.3415, 0.2189, 0.4198, 0.9822, 0.0220, 0.9119, 0.7410, + 0.2466, 0.2072, 0.8839, 0.7516, 0.8153, 0.2575, 0.8303, + 0.9406, 0.0281, 0.0637, 0.8256, 0.0137, 0.8551, 0.6904, + 0.7955, 0.7126, 0.4854, 0.7077, 0.7877, 0.2703, 0.2627, + 0.1225, 0.6814, 0.1981, 0.0012, 0.1101, 0.2261, 0.0650, + 0.7540, 0.2474, 0.6597, 0.2387, 0.2473, 0.3505, 0.4892, + 0.1885, 
0.9295, 0.0390, 0.0947, 0.3171, 0.4778, 0.2438, + 0.6996, 0.4455, 0.6953, 0.9830, 0.4988, 0.5386, 0.2650, + 0.2674, 0.7866, 0.9811, 0.0823, 0.0951, 0.2368, 0.8950, + 0.6075, 0.7359, 0.6430, 0.6470, 0.0664, 0.2765, 0.1109, + 0.1504, 0.4845, 0.0431, 0.3770, 0.2384, 0.0687, 0.8824, + 0.9446, 0.8249, 0.8327, 0.3623, 0.1484, 0.9592, 0.8566, + 0.3466, 0.6434, 0.1142, 0.1855, 0.2031]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1137, 0.5017, 0.2439, ..., 0.6384, 0.0681, 0.9585]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.11398792266845703 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '92115', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.6936018466949463} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 137, 3972, 2939, 2536, 3585, 3536, 4694, 4081, 1091, + 3547, 2158, 1560, 4654, 3916, 1298, 1826, 148, 3363, + 2515, 695, 1436, 2549, 3112, 1426, 4349, 4876, 3863, + 2266, 79, 3433, 3354, 3087, 4915, 1126, 3703, 2213, + 4969, 2103, 3978, 4220, 3833, 3752, 4926, 1827, 2953, + 4810, 372, 1434, 633, 4328, 3235, 2981, 1886, 1672, + 4865, 3611, 2035, 3841, 2469, 1487, 1861, 3293, 4642, + 1604, 4933, 4004, 2061, 3358, 3726, 2632, 960, 126, + 2232, 2877, 895, 621, 3810, 4400, 2844, 3004, 2625, + 1260, 1779, 776, 2146, 1667, 3230, 539, 2113, 1737, + 4402, 465, 2922, 3985, 142, 4315, 2921, 2750, 885, + 710, 4008, 1590, 1261, 4292, 3623, 3503, 1672, 3336, + 2572, 3267, 2993, 70, 1995, 836, 1449, 4056, 4774, + 1934, 3439, 2960, 4562, 3889, 2634, 1182, 2896, 3385, + 205, 905, 4516, 1281, 169, 4524, 563, 927, 1718, + 3751, 3566, 1379, 2664, 985, 2775, 4965, 4796, 483, + 2960, 2505, 3939, 4782, 2656, 1648, 2553, 588, 2612, + 4485, 4017, 1943, 4451, 4661, 1851, 2653, 4614, 956, + 1822, 2814, 2160, 1989, 3032, 922, 291, 1256, 4491, + 941, 544, 161, 604, 1328, 4789, 747, 3093, 4018, + 1261, 4345, 1576, 1083, 2753, 4075, 244, 4712, 4715, + 4014, 1207, 4378, 15, 4207, 1970, 605, 1755, 1089, + 2896, 831, 501, 3378, 2699, 1900, 724, 1190, 1825, + 660, 181, 3354, 4952, 4827, 2686, 26, 1403, 2918, + 3156, 1375, 2817, 2786, 1609, 3155, 1989, 2470, 2850, + 3165, 3975, 2060, 233, 699, 4823, 3317, 293, 1836, + 3608, 3776, 669, 4280, 4958, 4125, 2468, 2256, 2146, + 4901, 2841, 3736, 283, 190, 3398, 1922]), + values=tensor([0.6695, 0.9833, 0.1432, 0.4161, 0.8392, 0.4519, 0.7335, + 0.9958, 0.0219, 0.7710, 0.5001, 0.2641, 0.3766, 0.7103, + 0.8540, 0.5709, 0.1682, 0.2996, 0.5530, 0.5173, 0.8745, + 0.0752, 0.4820, 0.5228, 0.0339, 0.6709, 0.2580, 0.8586, + 0.8878, 0.0878, 0.4393, 0.2211, 0.2258, 0.4333, 0.0038, + 0.6951, 0.6433, 0.6381, 0.3492, 0.3731, 0.0316, 0.8649, + 0.6734, 0.3206, 0.8321, 0.7226, 0.7357, 0.0634, 0.0931, + 0.4512, 0.1531, 0.6138, 0.4706, 0.7999, 0.4089, 0.8748, 
+ 0.3486, 0.7322, 0.2439, 0.0715, 0.7807, 0.3511, 0.5350, + 0.1040, 0.6618, 0.9284, 0.6439, 0.1028, 0.6967, 0.1672, + 0.5232, 0.5990, 0.4131, 0.6209, 0.5668, 0.8927, 0.9754, + 0.2705, 0.6686, 0.2720, 0.2523, 0.2520, 0.2777, 0.2306, + 0.5601, 0.0701, 0.1220, 0.1669, 0.9340, 0.1957, 0.8919, + 0.8514, 0.7327, 0.5276, 0.8049, 0.2768, 0.0387, 0.1098, + 0.9042, 0.1414, 0.1252, 0.7087, 0.5489, 0.2450, 0.4588, + 0.9771, 0.4450, 0.1355, 0.9129, 0.4808, 0.5735, 0.9337, + 0.9658, 0.9256, 0.5364, 0.1244, 0.5347, 0.7434, 0.1846, + 0.7849, 0.7576, 0.0427, 0.2369, 0.3048, 0.5296, 0.9086, + 0.0541, 0.8841, 0.4305, 0.9907, 0.3676, 0.5804, 0.6895, + 0.9332, 0.0270, 0.3121, 0.8208, 0.8474, 0.2569, 0.4957, + 0.4133, 0.6520, 0.4588, 0.6225, 0.1027, 0.6632, 0.5190, + 0.0735, 0.1854, 0.8500, 0.6470, 0.2594, 0.7205, 0.8914, + 0.0489, 0.8156, 0.5306, 0.3119, 0.3137, 0.3120, 0.6417, + 0.2258, 0.6597, 0.8453, 0.6987, 0.4225, 0.5177, 0.2802, + 0.5315, 0.3767, 0.2520, 0.2831, 0.1536, 0.0334, 0.8465, + 0.7641, 0.9707, 0.5313, 0.7595, 0.4109, 0.8430, 0.9004, + 0.8413, 0.0821, 0.3632, 0.3777, 0.5912, 0.8961, 0.4075, + 0.0738, 0.9507, 0.9062, 0.2136, 0.1959, 0.6942, 0.6367, + 0.2811, 0.0027, 0.4216, 0.1826, 0.7776, 0.8261, 0.0554, + 0.1191, 0.5231, 0.1729, 0.5584, 0.7643, 0.0823, 0.4499, + 0.5024, 0.9288, 0.5019, 0.4372, 0.1384, 0.0776, 0.5461, + 0.7228, 0.2015, 0.8892, 0.2697, 0.1194, 0.6369, 0.9915, + 0.3322, 0.2044, 0.1389, 0.4917, 0.1141, 0.5811, 0.5234, + 0.7081, 0.5358, 0.2162, 0.4906, 0.8753, 0.4064, 0.6721, + 0.7143, 0.7824, 0.2108, 0.1572, 0.2915, 0.4564, 0.4382, + 0.0848, 0.7623, 0.7257, 0.3674, 0.7093]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1760, 0.3447, 0.5672, ..., 0.4540, 0.2179, 0.2738]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 2.6936018466949463 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '359075', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.526627540588379} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3778, 4984, 4122, 2676, 3957, 4059, 4909, 4911, 2572, + 1267, 1150, 3364, 3576, 4257, 4803, 3469, 2315, 1996, + 1589, 4554, 3627, 222, 735, 2019, 1196, 3402, 918, + 508, 1833, 3932, 3749, 3244, 4451, 1193, 3387, 2934, + 4933, 2676, 1892, 1253, 2562, 3303, 93, 1367, 4037, + 388, 4569, 3905, 2205, 438, 2955, 2830, 2546, 3603, + 3071, 4886, 2701, 3617, 3981, 2453, 1634, 906, 2460, + 4767, 4482, 2328, 3968, 2373, 709, 1470, 1396, 1265, + 427, 2495, 18, 4172, 3266, 4196, 702, 133, 2624, + 2942, 4262, 4579, 1940, 2403, 42, 1771, 590, 3624, + 3674, 2977, 3577, 3648, 673, 1388, 4388, 17, 194, + 155, 552, 2075, 1300, 4736, 849, 2848, 3737, 3431, + 4900, 4636, 211, 2218, 935, 599, 2948, 4874, 369, + 966, 947, 3488, 346, 1181, 1472, 1637, 372, 1874, + 4884, 172, 214, 771, 3131, 1713, 3058, 4267, 3602, + 2760, 3398, 3174, 9, 318, 4703, 779, 2824, 4515, + 2540, 3491, 647, 4310, 4641, 1357, 289, 349, 73, + 908, 1015, 1680, 677, 202, 1047, 1747, 4308, 1250, + 3160, 3099, 2970, 2272, 3209, 2339, 1660, 4649, 642, + 2647, 4042, 3441, 1713, 3501, 3454, 4660, 2114, 1751, + 4938, 3300, 396, 1888, 1868, 2474, 3021, 4177, 1556, + 3530, 583, 156, 782, 534, 780, 3712, 1163, 3018, + 2652, 2501, 1137, 3069, 4789, 548, 1908, 709, 3367, + 4443, 1991, 4909, 152, 2054, 2229, 14, 2251, 1027, + 3732, 288, 642, 4326, 2761, 4086, 1629, 946, 4083, + 1089, 2210, 3114, 3172, 376, 4660, 3852, 3198, 3613, + 592, 1388, 3114, 4183, 4318, 1850, 4771, 843, 2522, + 2774, 2939, 3529, 1857, 2895, 2137, 4447]), + values=tensor([0.4475, 0.8812, 0.1292, 0.7293, 0.6267, 0.0108, 0.5387, + 0.9156, 0.4928, 0.6543, 0.3448, 0.7375, 0.4487, 0.3828, + 0.2863, 0.2902, 0.7640, 0.5621, 0.0700, 0.7401, 0.8451, + 0.9099, 0.0211, 0.8004, 0.5172, 0.0685, 0.5469, 0.9562, + 0.9763, 0.1102, 0.0709, 0.8735, 0.6816, 0.5541, 0.7172, + 0.8388, 0.7596, 0.0622, 0.0743, 0.1726, 0.6490, 0.2165, + 0.6650, 0.7371, 0.8810, 0.8711, 0.2280, 0.6052, 0.7488, + 0.7562, 0.5277, 0.9948, 0.0106, 0.0299, 0.7667, 0.5618, + 0.6094, 0.9214, 0.6504, 0.8772, 0.7922, 0.0380, 0.8257, + 0.9627, 0.8457, 0.9488, 0.7481, 0.0656, 0.7384, 0.8073, + 0.8799, 0.1542, 0.7486, 0.0058, 0.8291, 0.9889, 0.8922, + 0.2911, 0.9747, 0.0465, 0.1509, 0.5817, 0.7676, 0.1559, + 0.4514, 0.2238, 0.9216, 0.0912, 0.0562, 0.6927, 0.2560, + 0.7407, 0.7561, 0.5126, 0.8908, 0.4965, 0.0086, 0.7725, + 0.2468, 0.7667, 0.7880, 0.6098, 0.9369, 0.5035, 0.3626, + 0.7343, 0.2151, 0.1827, 0.2696, 0.7224, 0.6480, 0.0746, + 0.6229, 0.9622, 0.8016, 0.2190, 0.3391, 0.8517, 0.1344, + 0.9710, 0.8151, 0.7634, 0.9047, 0.8447, 0.3478, 0.4789, + 0.5543, 0.6475, 0.6794, 0.8153, 0.2995, 0.6764, 0.2993, + 0.4440, 0.6818, 0.5702, 0.7074, 0.4488, 0.4032, 0.6268, + 0.7286, 0.4749, 0.3646, 0.0331, 0.4227, 0.8138, 0.3173, + 0.0403, 0.2636, 0.3980, 0.1390, 0.1641, 0.6671, 0.5330, + 0.3639, 0.7467, 0.8967, 0.7753, 0.2492, 0.1215, 0.6986, + 0.6107, 0.6922, 0.6270, 0.0513, 0.3708, 0.4140, 0.6870, + 0.6642, 0.1925, 0.0944, 0.4210, 0.5791, 0.4516, 0.5935, + 0.1022, 0.0482, 0.6022, 0.6705, 0.3885, 0.1005, 0.3611, + 0.3535, 0.1700, 0.7214, 0.8017, 0.2409, 0.4915, 0.6710, + 0.5749, 0.1541, 0.6514, 0.2028, 0.1566, 0.2795, 0.9275, + 0.1313, 0.4671, 0.8621, 0.0474, 0.9495, 0.4065, 0.1561, + 0.3930, 0.1891, 0.0713, 0.9951, 0.8365, 0.9415, 0.9314, + 0.4274, 0.7485, 0.9571, 0.9768, 0.5673, 0.4241, 0.5508, + 0.4033, 0.2950, 0.2855, 0.8415, 0.9844, 0.7770, 0.3923, + 0.5787, 0.9241, 0.3429, 0.2388, 0.7432, 0.5287, 0.4894, 
+ 0.3564, 0.1539, 0.3683, 0.3338, 0.2500, 0.3763, 0.4479, + 0.2028, 0.8079, 0.0187, 0.3962, 0.2530, 0.6932, 0.4307, + 0.2510, 0.2498, 0.5817, 0.8657, 0.8402]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.7545, 0.7162, 0.2861, ..., 0.9381, 0.3630, 0.3493]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.526627540588379 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3778, 4984, 4122, 2676, 3957, 4059, 4909, 4911, 2572, + 1267, 1150, 3364, 3576, 4257, 4803, 3469, 2315, 1996, + 1589, 4554, 3627, 222, 735, 2019, 1196, 3402, 918, + 508, 1833, 3932, 3749, 3244, 4451, 1193, 3387, 2934, + 4933, 2676, 1892, 1253, 2562, 3303, 93, 1367, 4037, + 388, 4569, 3905, 2205, 438, 2955, 2830, 2546, 3603, + 3071, 4886, 2701, 3617, 3981, 2453, 1634, 906, 2460, + 4767, 4482, 2328, 3968, 2373, 709, 1470, 1396, 1265, + 427, 2495, 18, 4172, 3266, 4196, 702, 133, 2624, + 2942, 4262, 4579, 1940, 2403, 42, 1771, 590, 3624, + 3674, 2977, 3577, 3648, 673, 1388, 4388, 17, 194, + 155, 552, 2075, 1300, 4736, 849, 2848, 3737, 3431, + 4900, 4636, 211, 2218, 935, 599, 2948, 4874, 369, + 966, 947, 3488, 346, 1181, 1472, 1637, 372, 1874, + 4884, 172, 214, 771, 3131, 1713, 3058, 4267, 3602, + 2760, 3398, 3174, 9, 318, 4703, 779, 2824, 4515, + 2540, 3491, 647, 4310, 4641, 1357, 289, 349, 73, + 908, 1015, 1680, 677, 202, 1047, 1747, 4308, 1250, + 3160, 3099, 2970, 2272, 3209, 2339, 1660, 4649, 642, + 2647, 4042, 3441, 1713, 3501, 3454, 4660, 2114, 1751, + 4938, 3300, 396, 1888, 1868, 2474, 3021, 4177, 1556, + 3530, 583, 156, 782, 534, 780, 3712, 1163, 3018, + 2652, 2501, 1137, 3069, 4789, 548, 1908, 709, 3367, + 4443, 1991, 4909, 152, 2054, 2229, 14, 2251, 1027, + 3732, 288, 642, 4326, 2761, 4086, 1629, 946, 4083, + 1089, 2210, 3114, 3172, 376, 4660, 3852, 3198, 3613, + 592, 1388, 3114, 4183, 4318, 1850, 4771, 843, 2522, + 2774, 2939, 3529, 1857, 2895, 2137, 4447]), + values=tensor([0.4475, 0.8812, 0.1292, 0.7293, 0.6267, 0.0108, 0.5387, + 0.9156, 0.4928, 0.6543, 0.3448, 0.7375, 0.4487, 0.3828, + 0.2863, 0.2902, 0.7640, 0.5621, 0.0700, 0.7401, 0.8451, + 0.9099, 0.0211, 0.8004, 0.5172, 0.0685, 0.5469, 0.9562, + 0.9763, 0.1102, 0.0709, 0.8735, 0.6816, 0.5541, 0.7172, + 0.8388, 0.7596, 0.0622, 0.0743, 0.1726, 0.6490, 0.2165, + 0.6650, 0.7371, 0.8810, 0.8711, 0.2280, 0.6052, 0.7488, + 0.7562, 0.5277, 0.9948, 0.0106, 0.0299, 0.7667, 0.5618, + 0.6094, 0.9214, 0.6504, 0.8772, 0.7922, 0.0380, 0.8257, + 0.9627, 0.8457, 0.9488, 0.7481, 0.0656, 0.7384, 0.8073, + 0.8799, 0.1542, 0.7486, 0.0058, 0.8291, 0.9889, 0.8922, + 0.2911, 0.9747, 0.0465, 0.1509, 0.5817, 0.7676, 0.1559, + 0.4514, 0.2238, 0.9216, 0.0912, 0.0562, 0.6927, 0.2560, + 0.7407, 0.7561, 0.5126, 0.8908, 0.4965, 0.0086, 0.7725, + 0.2468, 0.7667, 0.7880, 0.6098, 0.9369, 0.5035, 0.3626, + 0.7343, 0.2151, 0.1827, 0.2696, 0.7224, 0.6480, 0.0746, + 0.6229, 0.9622, 0.8016, 0.2190, 0.3391, 0.8517, 0.1344, + 0.9710, 0.8151, 0.7634, 0.9047, 0.8447, 0.3478, 0.4789, + 0.5543, 0.6475, 0.6794, 0.8153, 0.2995, 0.6764, 0.2993, + 0.4440, 0.6818, 0.5702, 0.7074, 0.4488, 0.4032, 
0.6268, + 0.7286, 0.4749, 0.3646, 0.0331, 0.4227, 0.8138, 0.3173, + 0.0403, 0.2636, 0.3980, 0.1390, 0.1641, 0.6671, 0.5330, + 0.3639, 0.7467, 0.8967, 0.7753, 0.2492, 0.1215, 0.6986, + 0.6107, 0.6922, 0.6270, 0.0513, 0.3708, 0.4140, 0.6870, + 0.6642, 0.1925, 0.0944, 0.4210, 0.5791, 0.4516, 0.5935, + 0.1022, 0.0482, 0.6022, 0.6705, 0.3885, 0.1005, 0.3611, + 0.3535, 0.1700, 0.7214, 0.8017, 0.2409, 0.4915, 0.6710, + 0.5749, 0.1541, 0.6514, 0.2028, 0.1566, 0.2795, 0.9275, + 0.1313, 0.4671, 0.8621, 0.0474, 0.9495, 0.4065, 0.1561, + 0.3930, 0.1891, 0.0713, 0.9951, 0.8365, 0.9415, 0.9314, + 0.4274, 0.7485, 0.9571, 0.9768, 0.5673, 0.4241, 0.5508, + 0.4033, 0.2950, 0.2855, 0.8415, 0.9844, 0.7770, 0.3923, + 0.5787, 0.9241, 0.3429, 0.2388, 0.7432, 0.5287, 0.4894, + 0.3564, 0.1539, 0.3683, 0.3338, 0.2500, 0.3763, 0.4479, + 0.2028, 0.8079, 0.0187, 0.3962, 0.2530, 0.6932, 0.4307, + 0.2510, 0.2498, 0.5817, 0.8657, 0.8402]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.7545, 0.7162, 0.2861, ..., 0.9381, 0.3630, 0.3493]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.526627540588379 seconds + +[18.43, 17.7, 18.24, 17.84, 17.84, 17.8, 18.19, 17.69, 17.91, 17.97] +[50.77] +14.327181577682495 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 359075, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.526627540588379, 'TIME_S_1KI': 0.029315957782046587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.3910086989404, 'W': 50.77} +[18.43, 17.7, 18.24, 17.84, 17.84, 17.8, 18.19, 17.69, 17.91, 17.97, 18.65, 17.88, 17.9, 17.81, 18.36, 17.89, 17.8, 17.9, 21.76, 18.54] +327.305 +16.36525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 359075, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.526627540588379, 'TIME_S_1KI': 0.029315957782046587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.3910086989404, 'W': 50.77, 'J_1KI': 2.0257355947892233, 'W_1KI': 0.14139107428810138, 'W_D': 34.40475000000001, 'J_D': 492.92310038477194, 'W_D_1KI': 0.09581494116827963, 'J_D_1KI': 0.00026683824039066943} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json index 5cee3aa..41d6bb0 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.577640295028687, "TIME_S_1KI": 22.577640295028687, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1196.5292521381377, "W": 64.81606725703797, "J_1KI": 1196.5292521381377, "W_1KI": 64.81606725703797, "W_D": 45.67406725703797, "J_D": 843.1606521334647, "W_D_1KI": 45.67406725703797, "J_D_1KI": 45.67406725703797} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 
10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 17.146100997924805, "TIME_S_1KI": 17.146100997924805, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1342.6130161285403, "W": 65.43610945887401, "J_1KI": 1342.6130161285403, "W_1KI": 65.43610945887401, "W_D": 46.466109458874016, "J_D": 953.388028173447, "W_D_1KI": 46.466109458874016, "J_D_1KI": 46.466109458874016} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output index 047a391..451f542 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.577640295028687} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 17.146100997924805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 26, ..., 999978, - 999989, 1000000]), - col_indices=tensor([16134, 16354, 24327, ..., 64689, 79970, 99510]), - values=tensor([0.0032, 0.4253, 0.4412, ..., 0.5357, 0.1333, 0.2349]), +tensor(crow_indices=tensor([ 0, 9, 17, ..., 999972, + 999990, 1000000]), + col_indices=tensor([13952, 31113, 48803, ..., 72766, 82982, 86351]), + values=tensor([0.4430, 0.0507, 0.5237, ..., 0.5341, 0.7602, 0.3481]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8413, 0.1731, 0.9001, ..., 0.4021, 0.4850, 0.1983]) +tensor([0.1669, 0.1860, 0.1675, ..., 0.5137, 0.0308, 0.0638]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,16 +16,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 22.577640295028687 seconds +Time: 17.146100997924805 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 26, ..., 999978, - 999989, 1000000]), - col_indices=tensor([16134, 16354, 24327, ..., 64689, 79970, 99510]), - values=tensor([0.0032, 0.4253, 0.4412, ..., 0.5357, 0.1333, 0.2349]), +tensor(crow_indices=tensor([ 0, 9, 17, ..., 999972, + 999990, 1000000]), + col_indices=tensor([13952, 31113, 48803, ..., 72766, 82982, 86351]), + values=tensor([0.4430, 0.0507, 0.5237, ..., 0.5341, 0.7602, 0.3481]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8413, 0.1731, 0.9001, ..., 0.4021, 0.4850, 0.1983]) +tensor([0.1669, 0.1860, 0.1675, ..., 0.5137, 0.0308, 0.0638]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -33,13 +33,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 22.577640295028687 seconds +Time: 17.146100997924805 seconds -[21.64, 21.64, 21.48, 21.36, 21.64, 21.36, 21.36, 21.28, 21.44, 21.36] -[21.12, 21.24, 21.32, 22.2, 24.84, 39.96, 57.72, 73.16, 89.8, 95.92, 95.4, 95.4, 93.32, 91.16, 90.92, 93.4, 92.2, 91.64] -18.46038031578064 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.577640295028687, 'TIME_S_1KI': 22.577640295028687, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1196.5292521381377, 'W': 64.81606725703797} -[21.64, 21.64, 21.48, 21.36, 21.64, 21.36, 21.36, 21.28, 21.44, 21.36, 21.08, 20.96, 20.96, 21.0, 21.28, 21.08, 21.28, 21.12, 21.12, 20.88] -382.84000000000003 -19.142000000000003 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.577640295028687, 'TIME_S_1KI': 22.577640295028687, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1196.5292521381377, 'W': 64.81606725703797, 'J_1KI': 1196.5292521381377, 'W_1KI': 64.81606725703797, 'W_D': 45.67406725703797, 'J_D': 843.1606521334647, 'W_D_1KI': 45.67406725703797, 'J_D_1KI': 45.67406725703797} +[21.44, 21.28, 21.16, 21.16, 20.96, 20.96, 20.96, 21.0, 21.24, 21.4] +[21.44, 21.28, 21.92, 23.96, 24.84, 38.64, 55.84, 70.04, 84.52, 91.04, 91.04, 91.56, 90.84, 89.12, 88.88, 89.24, 89.64, 88.8, 89.08, 90.0] +20.517922401428223 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 17.146100997924805, 'TIME_S_1KI': 17.146100997924805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1342.6130161285403, 'W': 65.43610945887401} +[21.44, 21.28, 21.16, 21.16, 20.96, 20.96, 20.96, 21.0, 21.24, 21.4, 21.16, 21.04, 21.08, 20.92, 20.88, 20.88, 21.04, 20.96, 21.2, 21.36] +379.4 +18.97 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 17.146100997924805, 'TIME_S_1KI': 17.146100997924805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1342.6130161285403, 'W': 65.43610945887401, 'J_1KI': 1342.6130161285403, 'W_1KI': 65.43610945887401, 'W_D': 46.466109458874016, 'J_D': 953.388028173447, 'W_D_1KI': 
46.466109458874016, 'J_D_1KI': 46.466109458874016} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..c78bc8a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 159.18061113357544, "TIME_S_1KI": 159.18061113357544, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 13305.551260681154, "W": 81.94491850885544, "J_1KI": 13305.551260681154, "W_1KI": 81.94491850885544, "W_D": 61.991918508855434, "J_D": 10065.7449476676, "W_D_1KI": 61.991918508855434, "J_D_1KI": 61.991918508855434} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..a21bcbb --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,47 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 159.18061113357544} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 95, 182, ..., 9999785, + 9999891, 10000000]), + col_indices=tensor([ 2375, 2397, 2562, ..., 95994, 97725, 99229]), + values=tensor([3.2988e-01, 7.8520e-04, 8.6482e-01, ..., + 9.5198e-01, 4.5600e-01, 9.5863e-01]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4135, 0.2091, 0.0976, ..., 0.1293, 0.9759, 0.9614]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 159.18061113357544 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
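A note on how the derived fields in these records fit together (the power-sampling harness is not part of this diff, so this is reconstructed from the numbers, not from its source). Each .output file ends with a 10-sample idle power list, the power samples taken during the final timed run, the elapsed wall time of that run, a partial result dict, a 20-sample idle list followed by two bare numbers (the second is the first divided by 20), and then the full dict. The relations below hold exactly for the 100000 x 100000, density 1e-4 record above; a sketch in Python, with the idle estimate taken from the log rather than rederived, since its exact formula is not determined by the output alone:

    # Derived power/energy fields, checked against the record above.
    J, elapsed = 1342.6130161285403, 20.517922401428223  # joules, seconds
    idle_w = 18.97           # printed in the log as 379.4 and 379.4 / 20

    W = J / elapsed          # 65.4361... -> 'W' (so J is consistent with W * elapsed)
    W_D = W - idle_w         # 46.4661... -> 'W_D', load power minus idle
    J_D = W_D * elapsed      # 953.388... -> 'J_D', energy attributable to the workload

    # The *_1KI fields rescale to 1000 iterations: v * 1000 / ITERATIONS.
    # Here ITERATIONS == 1000, so they equal their parents; in the 3301-iteration
    # record below, W_1KI = 57.0128... * 1000 / 3301 = 17.2714..., matching the JSON.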
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 95, 182, ..., 9999785, + 9999891, 10000000]), + col_indices=tensor([ 2375, 2397, 2562, ..., 95994, 97725, 99229]), + values=tensor([3.2988e-01, 7.8520e-04, 8.6482e-01, ..., + 9.5198e-01, 4.5600e-01, 9.5863e-01]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4135, 0.2091, 0.0976, ..., 0.1293, 0.9759, 0.9614]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 159.18061113357544 seconds + +[22.04, 21.88, 21.96, 22.12, 21.96, 21.92, 22.08, 22.08, 22.24, 22.08] +[22.12, 22.0, 22.4, 24.16, 25.0, 26.76, 35.48, 36.76, 45.64, 60.64, 68.76, 79.68, 90.72, 91.28, 91.28, 90.96, 89.36, 89.36, 87.68, 88.24, 87.52, 88.44, 91.68, 90.56, 91.8, 92.44, 92.04, 93.12, 92.52, 92.52, 92.88, 91.88, 91.24, 91.12, 90.44, 89.64, 89.48, 89.16, 87.68, 85.84, 85.0, 86.96, 88.04, 88.04, 88.0, 89.08, 87.48, 87.52, 86.2, 84.8, 84.64, 85.48, 85.76, 86.32, 87.44, 87.44, 88.64, 89.72, 89.72, 89.4, 87.64, 88.72, 89.0, 89.12, 89.76, 91.04, 88.36, 87.88, 86.68, 87.88, 86.48, 86.8, 86.68, 87.68, 87.68, 86.8, 87.56, 86.76, 84.72, 85.2, 85.08, 85.44, 86.48, 85.92, 86.4, 86.84, 84.56, 83.28, 84.6, 84.6, 85.76, 88.64, 88.68, 89.48, 90.88, 87.96, 88.04, 89.64, 89.6, 88.16, 88.6, 87.04, 86.96, 86.24, 86.24, 87.56, 87.32, 88.48, 89.36, 88.68, 89.56, 88.2, 85.8, 85.8, 86.36, 86.36, 88.2, 88.52, 91.48, 91.48, 91.08, 90.04, 89.24, 88.08, 88.36, 89.92, 89.76, 90.6, 89.4, 87.04, 84.72, 85.04, 83.32, 84.92, 84.92, 84.2, 85.16, 84.0, 84.2, 83.92, 84.84, 84.84, 87.04, 88.68, 91.04, 91.52, 89.88, 89.96, 89.44, 89.44, 88.36, 88.12, 90.24, 89.76, 88.8, 88.0, 86.72, 86.4] +162.37188959121704 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 159.18061113357544, 'TIME_S_1KI': 159.18061113357544, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 13305.551260681154, 'W': 81.94491850885544} +[22.04, 21.88, 21.96, 22.12, 21.96, 21.92, 22.08, 22.08, 22.24, 22.08, 21.88, 22.04, 22.48, 22.64, 22.36, 22.6, 22.6, 22.44, 21.76, 21.8] +399.06000000000006 +19.953000000000003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 159.18061113357544, 'TIME_S_1KI': 159.18061113357544, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 13305.551260681154, 'W': 81.94491850885544, 'J_1KI': 13305.551260681154, 'W_1KI': 81.94491850885544, 'W_D': 61.991918508855434, 'J_D': 10065.7449476676, 'W_D_1KI': 61.991918508855434, 'J_D_1KI': 61.991918508855434} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json index 15d76ee..dc43877 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 5444, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 
1e-05, "TIME_S": 15.66837453842163, "TIME_S_1KI": 2.878099658049528, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1209.8989781379698, "W": 61.815457006610345, "J_1KI": 222.24448533026631, "W_1KI": 11.354786371530189, "W_D": 42.416457006610344, "J_D": 830.2070464842318, "W_D_1KI": 7.791413851324457, "J_D_1KI": 1.4311928455776004} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 3301, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.605360507965088, "TIME_S_1KI": 3.8186490481566455, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 881.8972729587555, "W": 57.01281240597429, "J_1KI": 267.1606400965633, "W_1KI": 17.27137606966807, "W_D": 37.77781240597429, "J_D": 584.3625026237966, "W_D_1KI": 11.444353955157311, "J_D_1KI": 3.466935460514181} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output index 529e04c..842e972 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 100000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.735213041305542} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.180464267730713} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 99998, 100000, +tensor(crow_indices=tensor([ 0, 0, 2, ..., 99997, 99997, 100000]), - col_indices=tensor([47108, 85356, 39968, ..., 81528, 26483, 51109]), - values=tensor([0.3148, 0.6992, 0.6314, ..., 0.5894, 0.0851, 0.0670]), + col_indices=tensor([ 3926, 50379, 15277, ..., 29136, 40772, 68436]), + values=tensor([0.5699, 0.5366, 0.1661, ..., 0.2141, 0.3018, 0.3946]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4890, 0.3896, 0.3852, ..., 0.6786, 0.1828, 0.3984]) +tensor([0.8865, 0.6102, 0.2945, ..., 0.5701, 0.8700, 0.6634]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 2.735213041305542 seconds +Time: 3.180464267730713 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3838 -ss 100000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.402097463607788} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3301 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.605360507965088} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 99999, 99999, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 99998, 100000]), - col_indices=tensor([ 1694, 16648, 92396, ..., 98787, 30932, 62089]), - values=tensor([0.4689, 0.5529, 0.8985, ..., 0.1212, 0.7499, 0.9985]), + col_indices=tensor([33916, 32242, 16140, ..., 45457, 58350, 84955]), + values=tensor([0.9718, 0.7827, 0.4187, ..., 0.1750, 0.8602, 0.7313]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8040, 0.7540, 0.7072, ..., 0.4394, 0.3265, 0.7941]) +tensor([0.7754, 0.6786, 0.3605, ..., 0.9739, 0.1301, 0.4075]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 7.402097463607788 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 5444 -ss 100000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 15.66837453842163} +Time: 12.605360507965088 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 100000, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 99998, 100000]), - col_indices=tensor([ 8956, 7966, 63353, ..., 28673, 30724, 93829]), - values=tensor([0.9652, 0.8395, 0.8363, ..., 0.6704, 0.2134, 0.9962]), + col_indices=tensor([33916, 32242, 16140, ..., 45457, 58350, 84955]), + values=tensor([0.9718, 0.7827, 0.4187, ..., 0.1750, 0.8602, 0.7313]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1215, 0.6198, 0.6986, ..., 0.9502, 0.5989, 0.9473]) +tensor([0.7754, 0.6786, 0.3605, ..., 0.9739, 0.1301, 0.4075]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,30 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 15.66837453842163 seconds +Time: 12.605360507965088 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 100000, - 100000]), - col_indices=tensor([ 8956, 7966, 63353, ..., 28673, 30724, 93829]), - values=tensor([0.9652, 0.8395, 0.8363, ..., 0.6704, 0.2134, 0.9962]), - size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1215, 0.6198, 0.6986, ..., 0.9502, 0.5989, 0.9473]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 100000 -Density: 1e-05 -Time: 15.66837453842163 seconds - -[21.92, 21.92, 22.08, 21.96, 21.68, 21.6, 21.44, 21.8, 21.6, 21.76] -[21.64, 21.56, 21.96, 22.76, 26.44, 43.68, 43.68, 58.28, 74.48, 87.48, 92.36, 91.24, 90.2, 88.56, 87.2, 86.36, 85.72, 85.72, 85.2] -19.572757959365845 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 5444, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 15.66837453842163, 'TIME_S_1KI': 2.878099658049528, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1209.8989781379698, 'W': 61.815457006610345} -[21.92, 21.92, 22.08, 21.96, 21.68, 21.6, 21.44, 21.8, 21.6, 21.76, 21.4, 21.32, 21.48, 21.48, 21.32, 21.52, 21.24, 21.16, 21.2, 21.28] -387.98 -19.399 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 5444, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 15.66837453842163, 'TIME_S_1KI': 2.878099658049528, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1209.8989781379698, 'W': 61.815457006610345, 'J_1KI': 222.24448533026631, 'W_1KI': 11.354786371530189, 'W_D': 42.416457006610344, 'J_D': 830.2070464842318, 'W_D_1KI': 7.791413851324457, 'J_D_1KI': 1.4311928455776004} +[21.24, 21.24, 21.36, 21.4, 21.52, 21.72, 21.56, 21.64, 21.48, 21.48] +[21.4, 21.4, 21.8, 22.88, 23.96, 35.92, 52.84, 66.08, 81.2, 91.72, 90.6, 91.36, 91.84, 91.24, 91.24] +15.46840500831604 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 3301, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.605360507965088, 'TIME_S_1KI': 3.8186490481566455, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 881.8972729587555, 'W': 57.01281240597429} +[21.24, 21.24, 21.36, 21.4, 21.52, 21.72, 21.56, 21.64, 21.48, 21.48, 21.28, 21.28, 21.32, 21.48, 21.6, 21.48, 21.28, 21.12, 20.84, 20.76] +384.69999999999993 +19.234999999999996 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 3301, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.605360507965088, 'TIME_S_1KI': 3.8186490481566455, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 881.8972729587555, 'W': 57.01281240597429, 'J_1KI': 267.1606400965633, 'W_1KI': 17.27137606966807, 'W_D': 37.77781240597429, 'J_D': 584.3625026237966, 'W_D_1KI': 11.444353955157311, 'J_D_1KI': 3.466935460514181} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json index b31b4ad..b901590 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 31990, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.162655591964722, "TIME_S_1KI": 0.3176822629560713, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 281.0764434432984, "W": 21.267196136821532, "J_1KI": 8.786384602791447, "W_1KI": 0.6648076316605669, "W_D": 2.733196136821533, "J_D": 36.12309984016423, "W_D_1KI": 0.08543907898785662, "J_D_1KI": 0.0026708058451971432} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 32089, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.109246492385864, "TIME_S_1KI": 0.3150377541333748, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 318.60778367996215, "W": 22.396548812340328, "J_1KI": 9.92887854654125, "W_1KI": 0.6979509742385342, "W_D": 4.033548812340328, "J_D": 57.38027131915093, "W_D_1KI": 0.1256988005964763, "J_D_1KI": 0.003917192826092315} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output index 3e5c97c..8b8a1a7 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.32822322845458984} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, 
"TIME_S": 0.3272056579589844} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 9997, 9998, 10000]), - col_indices=tensor([2721, 4826, 6729, ..., 6567, 802, 8084]), - values=tensor([0.9788, 0.8960, 0.9515, ..., 0.3823, 0.9672, 0.4403]), +tensor(crow_indices=tensor([ 0, 3, 3, ..., 10000, 10000, 10000]), + col_indices=tensor([ 654, 4587, 9013, ..., 1787, 1854, 8773]), + values=tensor([0.1124, 0.2109, 0.1818, ..., 0.9520, 0.5472, 0.0091]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1442, 0.5021, 0.5745, ..., 0.9716, 0.6255, 0.3521]) +tensor([0.1189, 0.4488, 0.9345, ..., 0.0324, 0.3464, 0.4030]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.32822322845458984 seconds +Time: 0.3272056579589844 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 31990 -ss 10000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.162655591964722} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32089 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.109246492385864} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 9997, 10000, 10000]), - col_indices=tensor([1219, 6055, 1582, ..., 3506, 4664, 5684]), - values=tensor([0.3475, 0.3226, 0.1217, ..., 0.8742, 0.3097, 0.9052]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9996, 9998, 10000]), + col_indices=tensor([6261, 1350, 3983, ..., 9586, 2579, 6781]), + values=tensor([0.3771, 0.7405, 0.3284, ..., 0.1626, 0.7239, 0.9996]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.3839, 0.3550, 0.5972, ..., 0.2550, 0.5835, 0.6125]) +tensor([0.1818, 0.1444, 0.2139, ..., 0.0964, 0.7255, 0.0411]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,15 +34,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.162655591964722 seconds +Time: 10.109246492385864 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 9997, 10000, 10000]), - col_indices=tensor([1219, 6055, 1582, ..., 3506, 4664, 5684]), - values=tensor([0.3475, 0.3226, 0.1217, ..., 0.8742, 0.3097, 0.9052]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9996, 9998, 10000]), + col_indices=tensor([6261, 1350, 3983, ..., 9586, 2579, 6781]), + values=tensor([0.3771, 0.7405, 0.3284, ..., 0.1626, 0.7239, 0.9996]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.3839, 0.3550, 0.5972, ..., 0.2550, 0.5835, 0.6125]) +tensor([0.1818, 0.1444, 0.2139, ..., 0.0964, 0.7255, 0.0411]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -50,13 +50,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.162655591964722 seconds +Time: 10.109246492385864 seconds -[20.68, 20.44, 20.44, 20.76, 20.52, 20.44, 20.48, 20.32, 20.48, 20.48] -[20.68, 20.88, 21.64, 22.32, 23.4, 23.4, 23.8, 24.24, 23.6, 23.6, 23.44, 23.48, 23.64] -13.216431617736816 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 31990, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.162655591964722, 'TIME_S_1KI': 0.3176822629560713, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 281.0764434432984, 'W': 21.267196136821532} -[20.68, 20.44, 20.44, 20.76, 20.52, 20.44, 20.48, 20.32, 20.48, 20.48, 20.56, 20.64, 20.48, 20.52, 20.48, 20.72, 20.72, 20.8, 20.92, 21.32] -370.67999999999995 -18.534 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 31990, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.162655591964722, 'TIME_S_1KI': 0.3176822629560713, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 281.0764434432984, 'W': 21.267196136821532, 'J_1KI': 8.786384602791447, 'W_1KI': 0.6648076316605669, 'W_D': 2.733196136821533, 'J_D': 36.12309984016423, 'W_D_1KI': 0.08543907898785662, 'J_D_1KI': 0.0026708058451971432} +[20.52, 20.4, 20.52, 20.52, 20.52, 20.32, 20.32, 20.2, 20.36, 20.24] +[20.32, 20.48, 20.88, 25.52, 26.52, 27.24, 27.6, 24.68, 23.88, 23.88, 23.36, 23.6, 23.68, 23.6] +14.225753545761108 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 32089, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.109246492385864, 'TIME_S_1KI': 0.3150377541333748, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 318.60778367996215, 'W': 22.396548812340328} +[20.52, 20.4, 20.52, 20.52, 20.52, 20.32, 20.32, 20.2, 20.36, 20.24, 20.72, 20.52, 20.6, 20.4, 20.32, 20.52, 20.44, 20.32, 20.16, 20.16] +367.26 +18.363 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 32089, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.109246492385864, 'TIME_S_1KI': 0.3150377541333748, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 318.60778367996215, 'W': 22.396548812340328, 'J_1KI': 9.92887854654125, 'W_1KI': 0.6979509742385342, 'W_D': 4.033548812340328, 'J_D': 57.38027131915093, 'W_D_1KI': 0.1256988005964763, 
'J_D_1KI': 0.003917192826092315} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json index 44e0916..28a5b99 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4642, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.39481520652771, "TIME_S_1KI": 2.2392966838706827, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 335.1584589004516, "W": 23.611594488388015, "J_1KI": 72.20130523490987, "W_1KI": 5.086513246098236, "W_D": 5.040594488388017, "J_D": 71.54950427007671, "W_D_1KI": 1.0858669729401156, "J_D_1KI": 0.23392222596728038} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4566, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.211250305175781, "TIME_S_1KI": 2.236366689701222, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 296.4135251998902, "W": 22.432139051903395, "J_1KI": 64.91754822599435, "W_1KI": 4.912864444131274, "W_D": 4.068139051903394, "J_D": 53.75552614879615, "W_D_1KI": 0.8909634366849307, "J_D_1KI": 0.19512996861255602} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output index 12aea87..f081110 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.2614803314208984} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.2992472648620605} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
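The two (sometimes three) spmv.py invocations inside each .output file are a calibration loop: the first run always uses 1000 iterations, and the driver then rescales the iteration count until the timed loop takes at least BASELINE_TIME_S = 10 seconds. The scaling factor is consistent with a 10.5-second target across these files, e.g. floor(1000 * 10.5 / 0.3272) = 32089 in the record above, floor(1000 * 10.5 / 3.1805) = 3301 in the 100000/1e-05 file, and the removed two-step sequence 1000 -> 3838 -> 5444 follows the same rule. (The final run's printout is then repeated verbatim ahead of the power samples; the bit-identical TIME_S suggests it is echoed rather than re-run.) A sketch of the loop under those assumptions, with a hypothetical run() that launches spmv.py and returns its measured TIME_S:

    import math

    def calibrate(run, target_s=10.5, floor_s=10.0):
        """Scale the iteration count until the timed loop takes >= floor_s."""
        iters = 1000
        elapsed = run(iters)              # e.g. 0.3272 s at 1000 iterations
        while elapsed < floor_s:
            iters = math.floor(iters * target_s / elapsed)   # -> 32089
            elapsed = run(iters)          # e.g. 10.109 s, loop exits
        return iters, elapsed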
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 19, ..., 99975, 99988, +tensor(crow_indices=tensor([ 0, 7, 16, ..., 99980, 99989, 100000]), - col_indices=tensor([ 662, 710, 3445, ..., 9576, 9602, 9965]), - values=tensor([0.0517, 0.2381, 0.9401, ..., 0.3987, 0.7682, 0.4070]), + col_indices=tensor([ 655, 1592, 1705, ..., 9238, 9783, 9811]), + values=tensor([0.0624, 0.8226, 0.1738, ..., 0.6448, 0.8074, 0.7220]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1766, 0.1636, 0.7477, ..., 0.1192, 0.5625, 0.2605]) +tensor([0.5841, 0.1855, 0.2176, ..., 0.5967, 0.9561, 0.0240]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 2.2614803314208984 seconds +Time: 2.2992472648620605 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4642 -ss 10000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.39481520652771} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4566 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.211250305175781} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 19, ..., 99983, 99997, +tensor(crow_indices=tensor([ 0, 14, 28, ..., 99989, 99992, 100000]), - col_indices=tensor([ 82, 3146, 3840, ..., 8041, 8695, 8893]), - values=tensor([0.8450, 0.6541, 0.7727, ..., 0.8034, 0.8111, 0.1952]), + col_indices=tensor([ 778, 1147, 3454, ..., 4854, 5919, 8867]), + values=tensor([0.6002, 0.4939, 0.0259, ..., 0.9282, 0.0584, 0.5342]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0486, 0.3621, 0.6684, ..., 0.7127, 0.4964, 0.1751]) +tensor([0.8806, 0.9663, 0.5124, ..., 0.2617, 0.2277, 0.6355]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.39481520652771 seconds +Time: 10.211250305175781 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 19, ..., 99983, 99997, +tensor(crow_indices=tensor([ 0, 14, 28, ..., 99989, 99992, 100000]), - col_indices=tensor([ 82, 3146, 3840, ..., 8041, 8695, 8893]), - values=tensor([0.8450, 0.6541, 0.7727, ..., 0.8034, 0.8111, 0.1952]), + col_indices=tensor([ 778, 1147, 3454, ..., 4854, 5919, 8867]), + values=tensor([0.6002, 0.4939, 0.0259, ..., 0.9282, 0.0584, 0.5342]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0486, 0.3621, 0.6684, ..., 0.7127, 0.4964, 0.1751]) +tensor([0.8806, 0.9663, 0.5124, ..., 0.2617, 0.2277, 0.6355]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.39481520652771 seconds +Time: 10.211250305175781 seconds -[20.6, 20.72, 20.92, 21.0, 20.88, 20.88, 20.64, 20.36, 20.08, 19.92] -[20.16, 20.2, 20.48, 21.88, 25.12, 28.64, 29.48, 29.64, 29.64, 29.56, 24.6, 24.56, 24.44, 24.24] -14.194655895233154 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4642, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.39481520652771, 'TIME_S_1KI': 2.2392966838706827, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.1584589004516, 'W': 23.611594488388015} -[20.6, 20.72, 20.92, 21.0, 20.88, 20.88, 20.64, 20.36, 20.08, 19.92, 20.36, 20.52, 20.6, 20.84, 20.8, 20.56, 20.64, 20.64, 20.64, 20.52] -371.41999999999996 -18.570999999999998 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4642, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.39481520652771, 'TIME_S_1KI': 2.2392966838706827, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.1584589004516, 'W': 23.611594488388015, 'J_1KI': 72.20130523490987, 'W_1KI': 5.086513246098236, 'W_D': 5.040594488388017, 'J_D': 71.54950427007671, 'W_D_1KI': 1.0858669729401156, 'J_D_1KI': 0.23392222596728038} +[20.52, 20.28, 20.28, 20.24, 20.2, 20.2, 20.16, 20.44, 20.56, 20.64] +[20.68, 20.52, 20.84, 21.8, 22.68, 26.2, 26.72, 26.88, 26.88, 26.92, 24.56, 24.6, 24.48] +13.21378779411316 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.211250305175781, 'TIME_S_1KI': 2.236366689701222, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.4135251998902, 'W': 22.432139051903395} +[20.52, 20.28, 20.28, 20.24, 20.2, 20.2, 20.16, 20.44, 20.56, 20.64, 20.56, 20.52, 20.32, 20.28, 20.24, 20.4, 20.6, 20.68, 20.68, 20.68] +367.28000000000003 +18.364 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.211250305175781, 'TIME_S_1KI': 2.236366689701222, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.4135251998902, 'W': 22.432139051903395, 'J_1KI': 64.91754822599435, 'W_1KI': 4.912864444131274, 'W_D': 4.068139051903394, 'J_D': 53.75552614879615, 'W_D_1KI': 0.8909634366849307, 'J_D_1KI': 0.19512996861255602} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json index 6da6654..123a4ab 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.402220964431763, "TIME_S_1KI": 21.402220964431763, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 598.721107492447, "W": 23.65441632405048, "J_1KI": 598.721107492447, "W_1KI": 23.65441632405048, "W_D": 5.107416324050483, "J_D": 129.27471623349203, "W_D_1KI": 5.107416324050483, "J_D_1KI": 5.107416324050483} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.205244779586792, "TIME_S_1KI": 21.205244779586792, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 620.3200452423096, "W": 24.48389957916532, "J_1KI": 620.3200452423096, "W_1KI": 24.48389957916532, "W_D": 6.114899579165321, "J_D": 154.92608811497686, "W_D_1KI": 6.114899579165321, "J_D_1KI": 6.114899579165321} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output index 5b10379..9e8858d 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.402220964431763} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 21.205244779586792} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
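The spmv.py line echoed below every warning in these logs, `matrix = matrix.to_sparse_csr().type(torch.float32)` (line 75), together with the printed CSR tensor and random dense vector, pins down the shape of the benchmark even though spmv.py itself is not in this diff. A minimal sketch of the timed section, assuming a recent PyTorch with beta CSR support; note the generator here draws random COO indices, so its realised nnz can fall slightly short of size^2 * density after coalescing, whereas the logged runs always hit the target nnz exactly:

    import time
    import torch

    def spmv_benchmark(size, density, iterations):
        # Synthetic uniform-random matrix, converted as in spmv.py line 75.
        nnz = int(size * size * density)
        indices = torch.randint(0, size, (2, nnz))
        values = torch.rand(nnz)
        matrix = torch.sparse_coo_tensor(indices, values, (size, size)).coalesce()
        matrix = matrix.to_sparse_csr().type(torch.float32)
        vector = torch.rand(size, dtype=torch.float32)

        start = time.time()
        for _ in range(iterations):
            result = matrix @ vector      # y = A x, sparse CSR mat-vec
        elapsed = time.time() - start     # reported as TIME_S
        return elapsed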
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 109, 232, ..., 999786, - 999885, 1000000]), - col_indices=tensor([ 48, 108, 238, ..., 9836, 9911, 9942]), - values=tensor([0.7065, 0.8335, 0.4165, ..., 0.0617, 0.0653, 0.1993]), +tensor(crow_indices=tensor([ 0, 87, 190, ..., 999787, + 999893, 1000000]), + col_indices=tensor([ 40, 232, 261, ..., 9741, 9779, 9904]), + values=tensor([0.6083, 0.3635, 0.2569, ..., 0.1971, 0.1171, 0.3174]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6170, 0.2022, 0.1812, ..., 0.2173, 0.9754, 0.3705]) +tensor([0.7546, 0.0325, 0.8716, ..., 0.3834, 0.9539, 0.7452]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 21.402220964431763 seconds +Time: 21.205244779586792 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 109, 232, ..., 999786, - 999885, 1000000]), - col_indices=tensor([ 48, 108, 238, ..., 9836, 9911, 9942]), - values=tensor([0.7065, 0.8335, 0.4165, ..., 0.0617, 0.0653, 0.1993]), +tensor(crow_indices=tensor([ 0, 87, 190, ..., 999787, + 999893, 1000000]), + col_indices=tensor([ 40, 232, 261, ..., 9741, 9779, 9904]), + values=tensor([0.6083, 0.3635, 0.2569, ..., 0.1971, 0.1171, 0.3174]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6170, 0.2022, 0.1812, ..., 0.2173, 0.9754, 0.3705]) +tensor([0.7546, 0.0325, 0.8716, ..., 0.3834, 0.9539, 0.7452]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 21.402220964431763 seconds +Time: 21.205244779586792 seconds -[20.56, 20.56, 20.76, 20.64, 20.84, 20.76, 21.0, 20.84, 20.84, 20.8] -[20.64, 20.52, 20.44, 21.52, 22.84, 29.04, 29.96, 30.44, 29.96, 27.0, 24.4, 24.4, 24.24, 24.08, 24.0, 24.16, 24.04, 24.2, 24.28, 24.16, 24.12, 23.92, 23.72, 23.72, 23.72] -25.311176538467407 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.402220964431763, 'TIME_S_1KI': 21.402220964431763, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 598.721107492447, 'W': 23.65441632405048} -[20.56, 20.56, 20.76, 20.64, 20.84, 20.76, 21.0, 20.84, 20.84, 20.8, 20.76, 20.56, 20.4, 20.28, 20.24, 20.28, 20.36, 20.6, 20.6, 20.64] -370.93999999999994 -18.546999999999997 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.402220964431763, 'TIME_S_1KI': 21.402220964431763, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 598.721107492447, 'W': 23.65441632405048, 'J_1KI': 598.721107492447, 'W_1KI': 23.65441632405048, 'W_D': 5.107416324050483, 'J_D': 129.27471623349203, 'W_D_1KI': 5.107416324050483, 'J_D_1KI': 5.107416324050483} +[20.12, 20.4, 20.4, 20.36, 20.16, 20.24, 20.32, 
20.32, 20.32, 20.52] +[20.52, 20.76, 21.12, 22.84, 24.76, 33.76, 34.6, 34.52, 33.84, 26.88, 24.24, 24.24, 24.12, 24.12, 24.04, 24.0, 24.0, 24.04, 24.12, 23.96, 24.0, 24.12, 23.96, 23.96, 24.0] +25.335835218429565 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.205244779586792, 'TIME_S_1KI': 21.205244779586792, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 620.3200452423096, 'W': 24.48389957916532} +[20.12, 20.4, 20.4, 20.36, 20.16, 20.24, 20.32, 20.32, 20.32, 20.52, 20.52, 20.56, 20.6, 20.44, 20.44, 20.32, 20.28, 20.64, 20.68, 20.64] +367.38 +18.369 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 21.205244779586792, 'TIME_S_1KI': 21.205244779586792, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 620.3200452423096, 'W': 24.48389957916532, 'J_1KI': 620.3200452423096, 'W_1KI': 24.48389957916532, 'W_D': 6.114899579165321, 'J_D': 154.92608811497686, 'W_D_1KI': 6.114899579165321, 'J_D_1KI': 6.114899579165321} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json index 23a9329..054792b 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 108.30107378959656, "TIME_S_1KI": 108.30107378959656, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2693.844181451797, "W": 24.185192869557547, "J_1KI": 2693.844181451797, "W_1KI": 24.185192869557547, "W_D": 5.823192869557545, "J_D": 648.6106732220641, "W_D_1KI": 5.823192869557545, "J_D_1KI": 5.823192869557545} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.56615614891052, "TIME_S_1KI": 106.56615614891052, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2781.6071025085453, "W": 24.542113905929394, "J_1KI": 2781.6071025085453, "W_1KI": 24.542113905929394, "W_D": 6.099113905929396, "J_D": 691.2745423955923, "W_D_1KI": 6.099113905929396, "J_D_1KI": 6.099113905929396} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output index f20f6e0..1064fac 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, 
"MATRIX_DENSITY": 0.05, "TIME_S": 108.30107378959656} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 106.56615614891052} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 484, 1003, ..., 4999033, - 4999518, 5000000]), - col_indices=tensor([ 10, 43, 51, ..., 9955, 9982, 9992]), - values=tensor([0.0167, 0.2062, 0.3972, ..., 0.2194, 0.0680, 0.6916]), +tensor(crow_indices=tensor([ 0, 509, 1020, ..., 4998992, + 4999488, 5000000]), + col_indices=tensor([ 3, 11, 31, ..., 9971, 9976, 9990]), + values=tensor([0.8435, 0.0304, 0.5451, ..., 0.3255, 0.3710, 0.6386]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1565, 0.8667, 0.6742, ..., 0.1248, 0.3395, 0.1639]) +tensor([0.1386, 0.0671, 0.1165, ..., 0.0400, 0.5375, 0.5366]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 108.30107378959656 seconds +Time: 106.56615614891052 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 484, 1003, ..., 4999033, - 4999518, 5000000]), - col_indices=tensor([ 10, 43, 51, ..., 9955, 9982, 9992]), - values=tensor([0.0167, 0.2062, 0.3972, ..., 0.2194, 0.0680, 0.6916]), +tensor(crow_indices=tensor([ 0, 509, 1020, ..., 4998992, + 4999488, 5000000]), + col_indices=tensor([ 3, 11, 31, ..., 9971, 9976, 9990]), + values=tensor([0.8435, 0.0304, 0.5451, ..., 0.3255, 0.3710, 0.6386]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1565, 0.8667, 0.6742, ..., 0.1248, 0.3395, 0.1639]) +tensor([0.1386, 0.0671, 0.1165, ..., 0.0400, 0.5375, 0.5366]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 108.30107378959656 seconds +Time: 106.56615614891052 seconds -[20.52, 20.36, 20.4, 20.4, 20.48, 20.48, 20.48, 20.12, 20.12, 20.2] -[20.12, 20.52, 21.16, 22.2, 23.84, 29.72, 33.04, 33.04, 32.72, 32.68, 29.16, 24.72, 24.68, 24.72, 24.52, 24.44, 24.24, 24.24, 24.12, 24.08, 24.08, 24.28, 24.12, 24.28, 24.44, 24.44, 24.64, 24.6, 24.64, 24.52, 24.56, 24.4, 24.4, 24.28, 24.48, 24.36, 24.44, 24.48, 24.36, 24.24, 24.28, 24.12, 24.28, 24.32, 24.32, 24.28, 24.28, 24.4, 24.44, 24.36, 24.16, 24.0, 23.88, 23.92, 24.0, 24.08, 24.2, 24.2, 24.24, 24.16, 24.28, 24.36, 24.24, 24.36, 24.44, 24.48, 24.76, 24.56, 24.4, 24.4, 24.32, 24.28, 24.12, 24.4, 24.8, 24.76, 24.84, 24.84, 24.4, 24.4, 24.4, 24.2, 24.2, 24.16, 24.16, 23.84, 23.92, 24.12, 24.52, 24.52, 24.68, 24.52, 24.48, 24.24, 24.24, 24.28, 24.28, 24.52, 24.64, 24.48, 24.32, 24.16, 24.16, 24.0, 23.96, 23.96, 24.08, 24.08, 23.96, 24.28] -111.38402724266052 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 108.30107378959656, 'TIME_S_1KI': 108.30107378959656, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2693.844181451797, 'W': 24.185192869557547} -[20.52, 20.36, 20.4, 20.4, 20.48, 20.48, 20.48, 20.12, 20.12, 20.2, 20.64, 20.72, 20.68, 20.72, 20.36, 20.32, 20.24, 20.16, 20.24, 20.56] -367.24 -18.362000000000002 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 108.30107378959656, 'TIME_S_1KI': 108.30107378959656, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2693.844181451797, 'W': 24.185192869557547, 'J_1KI': 2693.844181451797, 'W_1KI': 24.185192869557547, 'W_D': 5.823192869557545, 'J_D': 648.6106732220641, 'W_D_1KI': 5.823192869557545, 'J_D_1KI': 5.823192869557545} +[20.4, 20.64, 20.52, 20.56, 20.6, 20.52, 20.56, 20.52, 20.2, 20.56] +[20.56, 20.68, 20.68, 24.36, 26.16, 32.64, 39.96, 40.52, 36.88, 36.0, 27.96, 24.36, 24.12, 24.28, 24.28, 24.44, 24.64, 24.72, 24.6, 24.36, 24.2, 23.96, 24.12, 24.08, 24.12, 24.24, 24.24, 24.28, 24.68, 24.56, 24.72, 24.84, 24.88, 24.56, 24.48, 24.32, 24.44, 24.52, 24.52, 24.72, 24.68, 24.68, 24.28, 24.0, 23.92, 23.84, 24.0, 24.24, 24.44, 24.48, 24.44, 24.44, 24.64, 24.68, 24.76, 24.72, 24.68, 24.52, 24.56, 24.64, 24.52, 24.68, 24.44, 24.44, 24.4, 24.64, 24.8, 24.68, 24.76, 24.64, 24.4, 24.12, 24.48, 24.56, 24.72, 24.72, 24.8, 24.96, 24.52, 24.32, 24.36, 24.24, 24.08, 24.04, 23.96, 24.08, 24.4, 24.48, 24.48, 24.64, 24.76, 24.4, 24.36, 24.4, 
24.56, 24.68, 24.68, 24.48, 24.36, 24.2, 24.2, 24.56, 24.48, 24.4, 24.36, 24.44, 24.36, 24.44, 24.36, 24.64, 24.52, 24.48] +113.34015941619873 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.56615614891052, 'TIME_S_1KI': 106.56615614891052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2781.6071025085453, 'W': 24.542113905929394} +[20.4, 20.64, 20.52, 20.56, 20.6, 20.52, 20.56, 20.52, 20.2, 20.56, 20.52, 20.48, 20.48, 20.56, 20.48, 20.36, 20.48, 20.52, 20.44, 20.4] +368.85999999999996 +18.442999999999998 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 106.56615614891052, 'TIME_S_1KI': 106.56615614891052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2781.6071025085453, 'W': 24.542113905929394, 'J_1KI': 2781.6071025085453, 'W_1KI': 24.542113905929394, 'W_D': 6.099113905929396, 'J_D': 691.2745423955923, 'W_D_1KI': 6.099113905929396, 'J_D_1KI': 6.099113905929396} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..175168f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 210.99842429161072, "TIME_S_1KI": 210.99842429161072, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5320.685762977602, "W": 24.45864835325204, "J_1KI": 5320.685762977602, "W_1KI": 24.45864835325204, "W_D": 6.0876483532520425, "J_D": 1324.2949264960312, "W_D_1KI": 6.0876483532520425, "J_D_1KI": 6.0876483532520425} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..1928ba2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 210.99842429161072} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([       0,     1065,     2071,  ..., 9998045,
+                             9999047, 10000000]),
+       col_indices=tensor([   6,   19,   22,  ..., 9974, 9992, 9993]),
+       values=tensor([0.1921, 0.6014, 0.9806,  ..., 0.7679, 0.7737, 0.6028]),
+       size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.1676, 0.2617, 0.2303,  ..., 0.3636, 0.4445, 0.4181])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([10000, 10000])
+Rows: 10000
+Size: 100000000
+NNZ: 10000000
+Density: 0.1
+Time: 210.99842429161072 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([       0,     1065,     2071,  ..., 9998045,
+                             9999047, 10000000]),
+       col_indices=tensor([   6,   19,   22,  ..., 9974, 9992, 9993]),
+       values=tensor([0.1921, 0.6014, 0.9806,  ..., 0.7679, 0.7737, 0.6028]),
+       size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.1676, 0.2617, 0.2303,  ..., 0.3636, 0.4445, 0.4181])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([10000, 10000])
+Rows: 10000
+Size: 100000000
+NNZ: 10000000
+Density: 0.1
+Time: 210.99842429161072 seconds
+
+[20.48, 20.52, 20.52, 20.68, 20.64, 20.64, 20.6, 20.56, 20.36, 20.48]
+[20.52, 20.6, 21.44, 23.16, 24.32, 34.12, 34.12, 35.68, 38.2, 37.44, 37.12, 27.84, 27.04, 24.4, 24.44, 24.4, 24.32, 24.48, 24.48, 24.36, 24.44, 24.72, 25.0, 24.92, 24.96, 24.96, 24.52, 24.4, 24.32, 24.24, 24.36, 24.36, 24.64, 24.56, 24.44, 24.52, 24.4, 24.6, 24.68, 24.84, 24.88, 24.64, 24.52, 24.52, 24.6, 24.48, 24.4, 24.28, 24.28, 24.32, 24.44, 24.44, 24.56, 24.52, 24.2, 24.16, 24.16, 24.12, 24.32, 24.36, 24.28, 24.28, 24.0, 24.12, 24.24, 24.4, 24.56, 25.04, 25.04, 24.84, 24.8, 24.76, 24.48, 24.44, 24.56, 24.48, 24.48, 24.4, 24.48, 24.36, 24.48, 24.48, 24.6, 24.68, 25.0, 25.0, 24.84, 24.76, 24.68, 24.44, 24.52, 24.52, 24.6, 24.6, 24.76, 24.48, 24.52, 24.4, 24.16, 24.24, 24.0, 24.24, 24.12, 24.24, 24.44, 24.44, 24.8, 24.8, 24.72, 24.64, 24.52, 24.28, 24.24, 24.16, 24.2, 24.32, 24.48, 24.32, 24.32, 24.28, 24.28, 24.32, 24.52, 24.56, 24.56, 24.6, 24.48, 24.4, 24.28, 24.24, 24.24, 24.24, 24.32, 24.48, 24.4, 24.4, 24.2, 24.08, 24.24, 24.4, 24.64, 24.68, 24.64, 24.64, 24.8, 24.6, 24.72, 24.8, 24.76, 24.76, 24.92, 25.08, 24.92, 24.88, 24.68, 24.68, 24.48, 24.32, 24.64, 24.68, 24.92, 24.92, 24.8, 24.68, 24.64, 24.44, 24.6, 24.6, 24.68, 24.52, 24.4, 24.44, 24.36, 24.12, 24.32, 24.24, 24.16, 24.24, 24.0, 24.24, 24.24, 24.44, 24.44, 24.6, 24.64, 24.44, 24.36, 24.48, 24.4, 24.64, 24.44, 24.64, 24.64, 24.6, 24.44, 24.64, 24.64, 24.32, 24.36, 24.24, 24.08, 24.36, 24.4, 24.48, 24.56, 24.56, 24.44, 24.32, 24.2, 24.36, 24.56, 24.68, 24.76, 24.92, 24.88]
+217.5380129814148
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 210.99842429161072, 'TIME_S_1KI': 210.99842429161072, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5320.685762977602, 'W': 24.45864835325204}
+[20.48, 20.52, 20.52, 20.68, 20.64, 20.64, 20.6, 20.56, 20.36, 20.48, 20.4, 20.36, 20.32, 20.32, 20.2, 20.2, 20.28, 20.16, 20.24, 20.28]
+367.41999999999996
+18.371
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 210.99842429161072, 'TIME_S_1KI': 210.99842429161072, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5320.685762977602, 'W': 24.45864835325204, 'J_1KI': 5320.685762977602, 'W_1KI': 24.45864835325204, 'W_D': 6.0876483532520425, 'J_D': 1324.2949264960312, 'W_D_1KI': 6.0876483532520425, 'J_D_1KI': 6.0876483532520425}
diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json
index c8342c5..40d44d2 100644
--- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json
+++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json
@@ -1 +1 @@
-{"CPU": "Altra", "CORES": 80, "ITERATIONS": 141816, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.164389848709106, "TIME_S_1KI": 0.07167308236524163, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 285.17019953727726, "W": 21.625492932171987, "J_1KI": 2.010846445656888, "W_1KI": 0.1524897961596152, "W_D": 3.365492932171989, "J_D": 44.37994981288918, "W_D_1KI": 0.023731405004879483, "J_D_1KI": 0.00016733940461499043}
+{"CPU": "Altra", "CORES": 80, "ITERATIONS": 142926, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.514646053314209, "TIME_S_1KI": 0.07356706304880994, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 341.9344939994813, "W": 24.04792007204229, "J_1KI": 2.3923883268228403, "W_1KI": 0.1682543419114947, "W_D": 4.089920072042293, "J_D": 58.15408343601235, "W_D_1KI": 0.02861564776207473, "J_D_1KI": 0.00020021303165326625}
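[Editor's note] The two bare numbers printed in the run above (367.41999999999996 and 18.371) are the sum and mean of the 20 idle baseline power readings, and the derived fields in these result dictionaries are consistent with the relationships sketched below. This is a hedged reconstruction from the numbers in this diff, not code quoted from spmv.py or batch.py; the variable names are illustrative.

    # Reconstruction of the derived energy/power fields (illustrative only).
    baseline = [20.48, 20.52, 20.52, 20.68, 20.64, 20.64, 20.6, 20.56, 20.36, 20.48,
                20.4, 20.36, 20.32, 20.32, 20.2, 20.2, 20.28, 20.16, 20.24, 20.28]
    idle_w = sum(baseline) / len(baseline)       # 18.371 W, the mean idle draw printed above

    joules, elapsed_s, iterations = 5320.685762977602, 217.5380129814148, 1000
    w = joules / elapsed_s                       # 'W'   -> 24.458...  (average power)
    w_d = w - idle_w                             # 'W_D' -> 6.087...   (power above idle)
    j_d = w_d * elapsed_s                        # 'J_D' -> 1324.29... (energy above idle)
    j_1ki = joules / (iterations / 1000)         # 'J_1KI'; the other *_1KI fields scale the same way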
diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output
index c5a4de0..e3d4653 100644
--- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output
+++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output
@@ -1,373 +1,266 @@
 ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 10000 -sd 1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.07915711402893066}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08297038078308105}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([8887, 6657, 7565,  ..., 9965, 4960, 9765]),
-       values=tensor([9.1816e-01, 1.0056e-01, 8.3455e-01,  ..., 1.4941e-01, 1.8696e-02, 2.4935e-02]),
[removed run's full 1000-entry col_indices/values dump elided]
+tensor(crow_indices=tensor([   0,    0,    0,  ...,  999,  999, 1000]),
+       col_indices=tensor([9541, 4744, 3707,  ..., 5971, 9089, 4617]),
+       values=tensor([0.0817, 0.3324, 0.2756,  ..., 0.5054, 0.7498, 0.9300]),
[added run's full 1000-entry col_indices/values dump elided]
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.9198, 0.2486, 0.6139,  ..., 0.7346, 0.8053, 0.7353])
+tensor([0.3956, 0.7164, 0.0973,  ..., 0.3827, 0.2591, 0.9120])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -375,378 +268,378 @@
 Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 0.07915711402893066 seconds
+Time: 0.08297038078308105 seconds
 
-['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 132647 -ss 10000 -sd 1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.821086883544922}
+['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 126551 -ss 10000 -sd 1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.296976089477539}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([9896, 2294,  608,  ..., 5030, 4099, 1700]),
-       values=tensor([9.8657e-01, 6.8017e-01, 5.9022e-02,  ..., 6.8823e-01, 9.8204e-01, 8.2582e-01]),
[removed run's full 1000-entry col_indices/values dump elided]
+       col_indices=tensor([2203, 8070, 6104,  ..., 879, 3947,
[added run's col_indices dump truncated here in the source]
2479, 3185, 6697, 2396, 6654, 1304, 5344, + 5776]), + values=tensor([2.5644e-01, 4.1340e-01, 8.8889e-01, 1.9996e-01, + 4.1859e-02, 4.5272e-01, 7.2267e-01, 9.2313e-01, + 9.7517e-01, 7.7641e-01, 6.5399e-01, 8.9647e-01, + 1.7929e-01, 2.1581e-01, 5.3999e-01, 2.7286e-01, + 3.3424e-01, 6.1854e-01, 7.0136e-01, 9.5008e-01, + 4.9345e-02, 3.2535e-01, 1.8620e-01, 9.8061e-01, + 1.8253e-01, 2.0059e-01, 5.9405e-01, 3.2028e-01, + 2.1950e-01, 9.9764e-02, 2.7925e-01, 1.2994e-01, + 6.0075e-01, 3.7837e-01, 4.7839e-02, 8.9279e-01, + 8.9909e-01, 1.8536e-02, 3.2654e-01, 9.2951e-01, + 5.0888e-01, 6.3988e-01, 3.9143e-01, 6.6039e-01, + 3.8083e-01, 8.8251e-01, 2.6190e-01, 9.5779e-01, + 4.0488e-01, 3.0840e-01, 7.4333e-01, 4.2414e-01, + 1.3552e-01, 6.3083e-01, 4.7621e-01, 8.6985e-01, + 1.4839e-01, 1.0087e-01, 3.0747e-01, 3.6247e-01, + 6.8986e-01, 2.7555e-03, 2.8920e-01, 8.2021e-01, + 4.1169e-01, 3.4318e-01, 2.5672e-01, 8.2270e-01, + 1.1811e-01, 3.1785e-01, 7.1889e-01, 4.2142e-01, + 3.0678e-01, 7.3338e-01, 5.9492e-01, 7.2248e-01, + 5.5072e-01, 3.5848e-01, 1.9054e-01, 9.7639e-01, + 7.4231e-01, 7.7266e-01, 2.5707e-01, 5.9393e-01, + 2.6795e-01, 7.7478e-01, 6.9466e-01, 4.8878e-01, + 6.4934e-01, 7.5196e-01, 6.9616e-01, 2.4523e-01, + 6.2998e-01, 8.6084e-01, 4.4823e-01, 5.1248e-01, + 6.0152e-01, 9.6506e-01, 1.5632e-01, 5.6839e-01, + 5.8601e-01, 3.5900e-01, 4.6999e-01, 2.0880e-01, + 6.6149e-01, 3.9173e-01, 7.6911e-01, 4.0235e-01, + 3.9630e-01, 9.2252e-01, 4.9607e-01, 2.4618e-02, + 9.5154e-01, 8.1269e-01, 1.9962e-01, 8.8695e-01, + 1.6389e-02, 9.9990e-01, 5.5126e-01, 5.8829e-01, + 3.1227e-01, 9.1196e-01, 7.8596e-01, 7.8259e-01, + 4.5294e-02, 4.8300e-02, 1.2913e-01, 8.2020e-01, + 3.9272e-01, 2.1128e-01, 8.6470e-01, 5.2628e-01, + 3.2533e-01, 1.7183e-01, 9.7494e-01, 5.3183e-01, + 1.9788e-01, 3.0667e-01, 1.2459e-01, 9.5696e-01, + 7.1166e-02, 9.8344e-01, 9.4402e-01, 5.9193e-01, + 3.8362e-02, 5.8093e-01, 2.7557e-01, 3.8264e-01, + 4.8925e-01, 2.6829e-01, 2.9458e-01, 7.6507e-01, + 8.6870e-01, 1.0706e-01, 8.2619e-01, 5.1984e-01, + 9.0042e-01, 2.2765e-01, 1.4051e-01, 9.3497e-01, + 6.1117e-01, 4.5945e-01, 4.1776e-01, 6.4888e-02, + 1.7439e-01, 7.3618e-01, 7.2456e-01, 7.2943e-01, + 6.3655e-01, 1.4152e-01, 9.1240e-01, 4.9941e-01, + 8.3074e-02, 6.2482e-01, 3.1784e-01, 9.5719e-01, + 6.2235e-01, 5.8051e-01, 9.7046e-01, 3.8059e-01, + 7.2437e-02, 3.0079e-01, 5.6747e-01, 1.2020e-01, + 9.7281e-01, 3.0302e-01, 6.5420e-01, 9.3100e-01, + 2.9623e-01, 9.7796e-01, 6.4885e-01, 2.5046e-01, + 2.2500e-01, 5.1568e-01, 7.4939e-01, 6.9754e-01, + 2.4394e-02, 9.2791e-01, 6.2239e-01, 5.8880e-01, + 7.9577e-02, 6.7327e-01, 3.3226e-01, 7.1189e-01, + 5.6001e-01, 9.0082e-01, 7.1409e-01, 5.3241e-01, + 9.4765e-02, 4.5222e-01, 8.1863e-01, 2.6081e-01, + 6.5638e-01, 8.6753e-01, 9.3524e-01, 2.5574e-01, + 8.2265e-01, 9.2571e-01, 4.3655e-01, 1.6080e-01, + 6.6905e-01, 5.8232e-01, 2.7328e-01, 3.9513e-01, + 1.7389e-01, 3.4148e-01, 7.5659e-01, 8.8088e-01, + 8.3409e-01, 7.5722e-02, 2.0101e-01, 9.8377e-01, + 8.3191e-01, 9.4678e-01, 2.2290e-01, 4.9474e-01, + 2.4686e-01, 4.7861e-01, 5.3301e-02, 5.0420e-01, + 9.7119e-01, 9.6644e-01, 3.6103e-01, 8.9844e-01, + 1.4760e-01, 5.5127e-01, 4.1634e-01, 4.5118e-01, + 5.3946e-01, 9.3326e-03, 9.8142e-02, 6.4986e-01, + 4.0779e-01, 3.3042e-01, 9.3167e-01, 8.0811e-01, + 1.0668e-01, 7.3926e-01, 3.5195e-01, 1.8629e-01, + 9.7074e-01, 7.5546e-01, 5.5758e-01, 9.6364e-01, + 5.4358e-01, 1.6670e-01, 7.5163e-01, 8.1226e-01, + 6.1329e-01, 5.3328e-01, 9.6244e-01, 3.1540e-01, + 7.4758e-01, 3.0115e-01, 8.5486e-01, 7.3290e-02, + 9.8182e-01, 6.7740e-02, 9.5535e-01, 
4.4933e-01, + 7.5007e-01, 3.6242e-01, 7.1763e-01, 2.6345e-01, + 4.6614e-01, 5.1043e-01, 9.3626e-01, 6.3715e-01, + 6.5932e-01, 2.7592e-01, 7.6444e-01, 6.0384e-01, + 7.8070e-01, 6.4287e-01, 4.5029e-01, 6.7633e-02, + 2.6546e-01, 6.0429e-01, 8.7571e-01, 1.9989e-01, + 8.8128e-01, 7.5303e-01, 9.9882e-01, 9.3341e-01, + 2.7616e-01, 4.8772e-01, 4.2568e-01, 7.3833e-01, + 7.7753e-01, 7.5636e-01, 8.3979e-01, 4.9876e-01, + 5.4339e-01, 6.1354e-02, 6.5538e-01, 8.6207e-01, + 2.8227e-02, 9.6964e-01, 9.2079e-01, 1.9484e-01, + 3.1203e-01, 1.6012e-01, 7.6943e-01, 6.5909e-01, + 6.5009e-01, 7.3086e-01, 6.2292e-01, 2.0275e-01, + 8.5049e-01, 4.8021e-01, 6.8391e-01, 8.2231e-01, + 6.5688e-01, 1.7624e-01, 2.9741e-01, 9.7814e-01, + 9.1427e-02, 7.6719e-01, 8.6114e-01, 2.3766e-03, + 4.8613e-01, 3.3859e-01, 7.3026e-01, 5.4892e-01, + 6.0711e-01, 3.1343e-02, 3.5572e-01, 2.7095e-01, + 4.5192e-01, 2.3350e-02, 2.0788e-01, 7.3319e-01, + 3.5718e-01, 8.3615e-01, 6.7478e-01, 2.1315e-01, + 5.8013e-01, 4.3802e-01, 7.8964e-01, 9.2608e-01, + 7.6007e-01, 8.7289e-01, 8.7554e-02, 7.4286e-01, + 8.6107e-01, 8.4040e-01, 6.1997e-01, 1.6524e-02, + 5.4761e-01, 1.9732e-01, 1.1518e-02, 1.7061e-01, + 8.1962e-01, 4.6316e-01, 1.8305e-02, 9.2708e-01, + 6.5729e-01, 4.8195e-01, 3.6484e-01, 6.7632e-01, + 6.5134e-01, 9.5068e-01, 1.0191e-01, 5.3523e-02, + 1.7437e-01, 9.4209e-01, 9.2081e-01, 6.7183e-02, + 4.5612e-01, 1.5881e-01, 2.6373e-01, 8.2337e-01, + 1.4498e-01, 4.5870e-01, 6.1453e-01, 5.0354e-01, + 7.5720e-01, 3.8739e-01, 7.3122e-01, 4.6802e-01, + 8.0048e-01, 4.9856e-01, 5.5596e-01, 7.2690e-01, + 3.5513e-01, 6.6211e-01, 9.6917e-01, 6.1725e-01, + 5.2916e-01, 8.4048e-01, 1.5088e-01, 7.7128e-01, + 1.9156e-01, 5.4016e-01, 5.7565e-01, 2.1934e-01, + 9.9619e-01, 8.1849e-04, 1.5205e-01, 5.7917e-01, + 8.4933e-01, 4.1169e-02, 7.4237e-01, 7.6301e-01, + 5.0152e-01, 8.0053e-01, 3.0855e-01, 1.0288e-01, + 2.5025e-01, 4.0389e-01, 7.0350e-01, 3.0858e-02, + 6.7947e-01, 8.4275e-01, 8.8528e-01, 3.6206e-01, + 5.9201e-02, 3.5994e-01, 2.0028e-01, 8.5520e-01, + 9.1181e-01, 3.9883e-01, 8.3835e-01, 4.3379e-01, + 2.6269e-01, 2.2338e-01, 3.8007e-01, 4.8995e-02, + 4.3151e-01, 5.4724e-01, 6.2329e-01, 7.1941e-01, + 3.5283e-01, 4.0846e-01, 4.0751e-02, 2.5253e-01, + 6.0487e-01, 3.9308e-01, 7.1244e-01, 1.3455e-01, + 6.4672e-01, 6.2530e-01, 5.3092e-01, 2.7846e-01, + 6.9308e-01, 1.4305e-01, 8.8981e-02, 7.4971e-01, + 7.6341e-01, 7.2273e-02, 1.8169e-01, 4.3213e-01, + 9.9740e-01, 3.9864e-02, 6.4685e-01, 2.8629e-01, + 7.2650e-02, 6.4796e-01, 2.3873e-01, 3.8527e-01, + 8.9744e-01, 3.7869e-01, 7.5353e-01, 2.6838e-01, + 2.9179e-01, 4.0297e-01, 2.2256e-01, 5.4265e-01, + 5.3049e-01, 8.5609e-01, 8.1699e-01, 6.7556e-01, + 4.5882e-03, 5.5027e-01, 3.6230e-01, 6.4707e-01, + 4.1756e-01, 9.2974e-01, 9.8266e-01, 7.8941e-01, + 4.2315e-03, 2.7125e-01, 6.0007e-01, 4.5247e-01, + 1.3985e-01, 4.4653e-01, 2.4965e-01, 7.8715e-01, + 4.5617e-02, 4.9657e-01, 9.0861e-01, 1.6663e-01, + 3.1933e-01, 2.2962e-01, 5.5708e-01, 1.0271e-02, + 8.4316e-01, 5.4472e-01, 7.3045e-01, 7.1526e-01, + 4.4407e-01, 6.9262e-01, 1.7777e-01, 4.1474e-01, + 4.1441e-01, 2.8247e-01, 8.2215e-01, 5.4061e-01, + 5.8557e-01, 1.2954e-01, 8.6512e-01, 3.7346e-01, + 9.7639e-01, 6.2218e-01, 6.4892e-01, 4.7474e-01, + 2.9746e-01, 7.8039e-02, 7.7508e-02, 3.9386e-02, + 8.9450e-02, 4.1531e-02, 3.3179e-01, 2.5929e-01, + 8.8076e-01, 4.5451e-01, 6.5253e-01, 6.2584e-01, + 2.1029e-01, 5.9882e-01, 6.7269e-01, 6.1554e-01, + 2.1835e-01, 1.3371e-01, 2.1393e-01, 3.2298e-01, + 6.2778e-01, 5.3887e-01, 8.5574e-01, 8.9290e-01, + 4.7363e-01, 5.0497e-01, 8.9594e-01, 
8.7902e-01, + 3.8137e-01, 8.1184e-01, 8.8479e-01, 1.5249e-01, + 6.7019e-01, 6.9002e-01, 1.2700e-01, 6.0877e-01, + 1.0355e-01, 4.8291e-01, 6.0360e-01, 8.7069e-01, + 1.5130e-01, 1.8137e-01, 5.4702e-01, 4.7769e-01, + 2.4143e-01, 9.2093e-01, 4.0588e-01, 7.2948e-01, + 1.4843e-02, 2.6549e-01, 5.1936e-01, 6.5236e-02, + 4.8623e-01, 8.5154e-01, 9.1855e-01, 1.5040e-01, + 4.8009e-01, 6.0119e-01, 4.3431e-01, 5.5707e-01, + 4.0824e-02, 3.3741e-01, 8.5017e-01, 7.1405e-01, + 1.0416e-01, 7.9003e-02, 1.7283e-01, 3.4951e-01, + 4.9567e-01, 8.6852e-01, 8.4830e-01, 6.2344e-01, + 3.1355e-01, 9.9906e-01, 8.1478e-01, 9.5230e-01, + 1.6444e-01, 9.8398e-01, 6.6864e-01, 3.2892e-01, + 9.3508e-01, 6.5058e-01, 6.8678e-01, 5.5765e-01, + 7.3767e-01, 6.6605e-01, 2.1626e-01, 1.7300e-01, + 1.0184e-01, 2.7474e-01, 7.6279e-02, 3.4215e-01, + 5.9838e-01, 3.9154e-01, 7.0390e-01, 9.7144e-01, + 9.7173e-01, 2.4035e-02, 7.6844e-01, 5.2377e-01, + 8.2957e-01, 2.1413e-01, 5.4171e-01, 3.9619e-01, + 2.8764e-02, 4.1875e-02, 1.4137e-01, 5.0321e-01, + 1.8595e-01, 5.1899e-01, 8.3020e-01, 4.2297e-01, + 2.6879e-01, 6.9489e-01, 8.5420e-01, 5.4989e-01, + 2.3704e-02, 9.4223e-01, 3.4710e-01, 6.2781e-01, + 1.7831e-01, 6.3560e-01, 9.5015e-01, 4.8756e-01, + 4.9172e-01, 4.4354e-02, 8.0058e-01, 1.9326e-01, + 8.8187e-01, 4.5301e-01, 4.7846e-01, 4.6055e-01, + 6.9147e-01, 6.3841e-01, 5.5214e-02, 9.2087e-01, + 6.9432e-01, 7.5196e-01, 4.1958e-01, 6.8695e-01, + 2.6582e-01, 3.0347e-01, 9.1425e-01, 8.9327e-02, + 7.6973e-01, 7.6899e-02, 9.2230e-01, 3.1543e-01, + 4.4251e-01, 9.0630e-01, 6.7820e-01, 8.3933e-01, + 9.4875e-01, 1.8882e-01, 9.9541e-01, 7.1216e-01, + 8.7728e-01, 4.3430e-01, 5.3255e-01, 2.2379e-01, + 7.1012e-01, 7.4961e-01, 5.3055e-02, 4.8019e-01, + 4.3443e-01, 6.9804e-01, 2.4958e-01, 7.7851e-01, + 6.8751e-01, 8.9559e-01, 9.8910e-01, 5.4960e-01, + 6.7338e-01, 7.5675e-01, 3.9315e-01, 7.4544e-02, + 7.8019e-01, 1.6178e-01, 8.4115e-02, 1.0903e-01, + 7.6844e-01, 4.5180e-03, 5.9662e-01, 1.2902e-01, + 5.1261e-01, 1.4645e-01, 6.8718e-01, 7.0734e-01, + 2.4311e-01, 7.0618e-01, 8.5873e-01, 8.2221e-01, + 2.5583e-01, 4.2945e-01, 5.8827e-01, 8.1189e-01, + 4.3496e-01, 6.7312e-01, 3.8112e-01, 1.4863e-01, + 3.4030e-01, 3.5958e-01, 9.2091e-01, 9.9895e-01, + 5.1970e-01, 1.0638e-01, 5.3834e-01, 3.0344e-01, + 6.5353e-02, 6.3903e-02, 6.8394e-01, 7.1237e-01, + 7.2614e-01, 2.8107e-01, 9.4215e-01, 1.9339e-01, + 3.2766e-01, 6.6328e-01, 4.6436e-01, 7.2002e-01, + 3.7498e-01, 9.1908e-02, 2.8070e-02, 3.6489e-01, + 8.7565e-01, 8.9904e-01, 1.4724e-01, 1.8502e-01, + 6.2364e-01, 2.5974e-01, 9.4038e-01, 7.0849e-01, + 5.6427e-01, 4.7039e-01, 3.1376e-03, 2.8992e-01, + 8.4344e-01, 8.2341e-01, 3.0603e-01, 3.0914e-01, + 6.9608e-02, 4.2120e-01, 7.8318e-02, 6.2189e-01, + 1.1195e-01, 8.9439e-01, 9.9288e-01, 5.7460e-01, + 3.6105e-01, 9.9265e-01, 9.1951e-02, 2.6031e-01, + 9.2668e-01, 2.1713e-01, 7.0902e-01, 8.1788e-01, + 7.4976e-01, 4.7434e-01, 1.1619e-01, 1.2965e-01, + 6.0275e-01, 1.7535e-01, 3.6080e-01, 8.9028e-01, + 3.9348e-01, 1.5106e-01, 3.7276e-01, 5.8945e-01, + 2.9236e-01, 2.4562e-01, 3.0066e-01, 7.6287e-01, + 9.9890e-01, 5.1364e-01, 9.7297e-02, 2.6798e-01, + 5.1337e-02, 7.7922e-01, 9.0723e-01, 4.5419e-01, + 4.9457e-01, 1.6032e-01, 7.3101e-01, 5.3987e-01, + 7.2490e-01, 6.1795e-01, 7.4445e-01, 6.6125e-01, + 7.6080e-01, 4.2675e-01, 4.2235e-01, 1.7015e-01, + 7.2438e-01, 9.4198e-01, 4.6468e-01, 9.0142e-02, + 4.7565e-01, 2.3678e-01, 8.3100e-02, 9.5983e-01, + 8.9786e-01, 6.5702e-01, 6.2276e-01, 5.7525e-01, + 6.5825e-01, 9.4434e-01, 9.8952e-01, 2.5318e-01, + 4.7310e-01, 1.2816e-02, 7.7734e-01, 
4.2004e-01, + 4.3964e-01, 1.3846e-02, 6.6626e-01, 3.4767e-01, + 6.3387e-01, 5.3276e-01, 7.5902e-01, 2.3507e-01, + 2.0220e-01, 5.1960e-01, 7.1115e-01, 3.4339e-01, + 7.4781e-01, 8.1715e-01, 8.2597e-01, 6.3771e-01, + 5.5599e-01, 2.8860e-01, 9.7519e-01, 1.7405e-01, + 2.4765e-01, 3.9022e-01, 7.0591e-01, 3.5410e-01, + 5.0492e-01, 7.7335e-01, 6.5399e-01, 2.5890e-01, + 1.7992e-01, 2.8937e-02, 3.9978e-01, 6.4485e-01, + 9.8804e-01, 2.6525e-01, 8.2352e-02, 4.5200e-01, + 3.7706e-01, 3.6888e-01, 4.6742e-01, 2.2734e-01, + 8.0517e-01, 5.3933e-01, 9.4715e-01, 9.0092e-01, + 1.7050e-01, 5.7693e-01, 8.3203e-01, 1.3006e-01, + 1.0404e-03, 1.5028e-01, 4.2248e-01, 2.9590e-01, + 1.7078e-01, 7.3796e-01, 7.4155e-01, 8.7998e-01, + 4.7809e-01, 7.3390e-02, 9.7648e-01, 5.1828e-01, + 1.7042e-01, 3.3970e-01, 9.5290e-01, 4.0381e-01, + 7.7718e-01, 3.9948e-01, 6.7108e-01, 4.2412e-01, + 3.3090e-01, 7.4833e-01, 7.3278e-01, 7.2853e-01, + 6.9950e-01, 8.1746e-01, 3.1753e-01, 1.0600e-01, + 4.4370e-01, 6.9105e-01, 8.7701e-01, 7.3030e-02, + 5.1421e-01, 2.8882e-01, 6.5366e-01, 2.3271e-01, + 7.2580e-01, 7.4486e-02, 8.4800e-02, 5.6108e-01, + 4.2216e-01, 7.9148e-01, 6.7937e-01, 3.8844e-01, + 5.1370e-01, 8.9399e-01, 7.3522e-01, 9.0791e-01, + 2.4611e-01, 4.7796e-01, 1.5963e-01, 9.2416e-01, + 1.1957e-01, 2.7998e-01, 5.6346e-01, 2.9692e-01, + 6.8561e-01, 4.5870e-03, 9.7147e-01, 7.5045e-01, + 3.6538e-01, 4.5510e-01, 5.6387e-01, 3.5971e-01, + 3.3289e-01, 6.0219e-01, 6.2316e-01, 4.1027e-01, + 8.5536e-01, 2.2988e-01, 2.9483e-01, 5.2633e-01, + 3.4637e-01, 8.8604e-01, 8.9721e-01, 8.2901e-02, + 9.3086e-01, 8.5600e-01, 7.4177e-01, 6.5607e-01, + 8.4765e-01, 8.9582e-01, 3.7468e-01, 5.4245e-01, + 6.8428e-01, 4.0500e-01, 5.1662e-01, 1.8751e-01, + 5.3619e-01, 2.3998e-01, 3.6662e-01, 3.1969e-01, + 4.1226e-01, 7.0012e-01, 3.1767e-01, 2.0586e-01, + 8.8678e-01, 4.7686e-01, 9.4057e-01, 6.6187e-01, + 6.3070e-01, 8.9669e-01, 3.3070e-01, 5.7151e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4983, 0.3444, 0.7600, ..., 0.8482, 0.3104, 0.4836]) +tensor([0.6841, 0.8830, 0.8336, ..., 0.0293, 0.2899, 0.6914]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -754,378 +647,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 9.821086883544922 seconds +Time: 9.296976089477539 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 141816 -ss 10000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.164389848709106} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 142926 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.514646053314209} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
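Between the old and new runs above, only the requested iteration count changes (141816 vs. 142926) while the reported TIME_S stays near ten seconds, so the count is evidently re-derived each time to fill a roughly 10-second measurement window. A minimal sketch of that calibration under that assumption — run_spmv is a hypothetical stand-in for the timed loop in spmv.py, which this diff does not show:

    import time

    TRIAL_ITERS = 1000   # assumed short trial run
    TARGET_S = 10.0      # the logged runs all land near 10 s

    def run_spmv(matrix, vector, iterations):
        # Hypothetical stand-in: time `iterations` sparse mat-vec products.
        start = time.time()
        for _ in range(iterations):
            matrix @ vector
        return time.time() - start   # corresponds to the TIME_S field

    def calibrate(trial_time_s):
        # Scale the trial count so the final run lasts ~TARGET_S seconds.
        # For this matrix a trial of ~0.07 s at 1000 iterations extrapolates
        # to ~140000+ iterations, the same order as 141816 and 142926 above.
        return max(1, int(TRIAL_ITERS * TARGET_S / trial_time_s))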
/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([1810, 1856,  931,  ..., 3554, 2744, 1420]),
-       values=tensor([1.2669e-01, 8.7393e-01, 8.1661e-01,  ...,
-                      4.3839e-01, 8.3264e-01, 3.0102e-01]),
+       col_indices=tensor([4116, 7192, 1414,  ..., 2800, 2814, 7062]),
+       values=tensor([1.3767e-01, 7.1628e-01, 8.6135e-01,  ...,
+                      5.9161e-01, 8.3040e-01, 9.4507e-01]),
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.7176, 0.7209, 0.4421,  ..., 0.7861, 0.0630, 0.0040])
+tensor([0.7781, 0.3926, 0.9305,  ..., 0.7246, 0.2621, 0.2068])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -1133,375 +1026,375 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 10.164389848709106 seconds
+Time: 10.514646053314209 seconds
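A note on reading these dumps: crow_indices holds one compressed row pointer per row plus one, so the printed run 0, 0, 0, ..., 1000, 1000, 1000 means the first rows shown hold no entries and the pointer has already reached 1000 by the last rows, i.e. all 1000 nonzeros fall in the rows elided in between. A self-contained sketch that reproduces this kind of matrix, assuming the synthetic generator simply scatters nnz = size^2 * density uniform entries (the actual generator in spmv.py may differ):

    import torch

    size, density = 10000, 1e-05
    nnz = int(size * size * density)      # 10000^2 * 1e-05 = 1000 entries

    # Scatter nnz uniform values at random coordinates (assumed scheme);
    # coalesce() merges any duplicate coordinates, so the final nnz can
    # come out marginally below the target.
    indices = torch.randint(size, (2, nnz))
    matrix = torch.sparse_coo_tensor(indices, torch.rand(nnz), (size, size))
    matrix = matrix.coalesce().to_sparse_csr().type(torch.float32)

    vector = torch.rand(size)
    print(matrix)            # crow_indices / col_indices / values, as above
    print(matrix @ vector)   # the dense result tensor printed after each dump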
/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([1810, 1856,  931,  ..., 3554, 2744, 1420]),
-       values=tensor([1.2669e-01, 8.7393e-01, 8.1661e-01,  ...,
8.2910e-01, 7.1263e-01, - 4.1041e-01, 7.2068e-01, 8.4498e-01, 5.2096e-02, - 9.5888e-01, 7.9916e-02, 6.4003e-01, 6.0626e-01, - 3.3487e-01, 1.9126e-01, 8.2271e-01, 5.8379e-01, - 5.6910e-01, 6.6284e-02, 9.9075e-01, 9.6108e-01, - 8.1421e-01, 8.9408e-03, 4.7697e-01, 1.4288e-01, - 4.3262e-01, 1.2967e-01, 9.4186e-01, 9.9199e-01, - 4.3357e-01, 6.1622e-01, 5.7557e-01, 3.5405e-01, - 6.9167e-01, 8.5568e-01, 4.3381e-01, 8.4869e-01, - 2.1798e-01, 2.4886e-01, 2.0942e-01, 2.6149e-01, - 8.5653e-01, 1.5770e-01, 3.8634e-01, 4.9710e-01, - 7.4703e-01, 4.5228e-01, 3.0141e-01, 9.7591e-01, - 7.5096e-01, 8.1526e-01, 5.8201e-01, 1.4188e-01, - 4.6956e-02, 1.5967e-01, 9.8820e-01, 4.6653e-01, - 9.8081e-01, 2.8229e-01, 7.7183e-01, 1.6760e-01, - 6.5027e-01, 9.6884e-01, 1.9788e-01, 8.5793e-01, - 6.0644e-01, 4.0800e-01, 2.5730e-01, 3.9003e-01, - 1.7666e-01, 8.0375e-02, 3.4927e-01, 9.6081e-01, - 2.3696e-01, 8.5889e-01, 8.6890e-01, 2.1566e-01, - 6.7472e-01, 7.3409e-01, 3.6598e-01, 6.4398e-01, - 6.0834e-01, 2.2605e-01, 9.6430e-01, 3.1620e-01, - 4.2307e-01, 2.9137e-01, 3.7774e-02, 3.2640e-01, - 7.8393e-01, 1.2873e-01, 1.8067e-01, 3.4296e-01, - 5.8599e-01, 1.8457e-01, 2.8466e-01, 1.6284e-01, - 3.8473e-01, 3.7876e-01, 7.5374e-01, 2.2498e-01, - 1.0433e-01, 9.8092e-01, 5.2731e-01, 4.5361e-01, - 4.3056e-01, 5.9834e-02, 9.8079e-01, 5.3458e-01, - 7.7131e-02, 1.6104e-01, 2.2237e-01, 5.4361e-01, - 5.1495e-01, 4.2570e-01, 6.6160e-01, 5.8793e-01, - 3.6340e-01, 9.8347e-01, 2.7024e-01, 2.1522e-01, - 3.3250e-01, 7.9908e-01, 2.6481e-01, 3.5391e-01, - 8.3857e-01, 8.7019e-01, 8.7277e-01, 3.2002e-01, - 5.3592e-01, 1.0393e-01, 3.3628e-02, 1.4102e-01, - 3.8307e-01, 9.9468e-01, 2.2920e-01, 1.6939e-01, - 5.4000e-01, 6.7492e-01, 3.1163e-01, 9.0707e-01, - 1.4558e-01, 4.4107e-01, 9.1161e-01, 2.9585e-01, - 8.9864e-01, 5.7575e-01, 8.0033e-01, 1.8430e-01, - 2.6406e-01, 2.8590e-01, 4.5809e-01, 3.0343e-01, - 1.6994e-01, 7.0324e-01, 8.4510e-01, 6.1950e-01, - 6.1401e-01, 5.4919e-01, 2.5461e-01, 2.3646e-01, - 4.0097e-03, 5.3325e-01, 1.7385e-01, 1.9099e-01, - 7.6460e-01, 4.9279e-01, 2.3519e-01, 4.4248e-01, - 1.8583e-01, 7.0388e-01, 9.0522e-01, 6.9809e-01, - 4.8338e-01, 6.8957e-01, 7.8366e-01, 1.2459e-01, - 5.5699e-01, 4.4103e-01, 2.1030e-01, 9.5385e-01, - 9.1934e-01, 5.2540e-01, 5.4072e-01, 7.4035e-01, - 1.1919e-01, 8.7267e-01, 5.0507e-01, 1.3004e-01, - 6.5324e-01, 2.4884e-01, 6.1389e-01, 6.0188e-01, - 5.4507e-01, 5.1591e-01, 6.9854e-01, 3.3306e-01, - 1.0301e-01, 8.1510e-01, 3.2515e-01, 8.7804e-01, - 9.9064e-01, 5.8741e-01, 6.9486e-01, 2.9900e-01, - 8.9103e-01, 4.2348e-01, 6.3428e-01, 3.3684e-02, - 4.1931e-01, 2.9892e-01, 4.6109e-01, 2.7567e-01, - 9.3564e-01, 5.2654e-01, 1.0018e-01, 8.4790e-01, - 5.7012e-01, 5.4810e-01, 3.4658e-01, 7.4770e-01, - 9.4877e-01, 5.1453e-01, 8.3770e-01, 2.2628e-01, - 2.8297e-01, 9.3592e-01, 9.9443e-01, 7.3418e-01, - 7.7762e-02, 3.2115e-01, 2.8077e-01, 2.4932e-03, - 7.7722e-01, 9.3033e-02, 2.8306e-01, 9.0197e-01, - 3.8358e-01, 2.3377e-01, 2.8415e-01, 9.6665e-01, - 4.5076e-01, 5.8332e-01, 5.3551e-01, 1.4427e-01, - 9.7314e-01, 9.5072e-01, 9.6080e-01, 6.9839e-01, - 7.7003e-01, 2.9602e-02, 1.5625e-02, 9.6492e-01, - 5.5545e-01, 4.6314e-02, 4.0823e-01, 3.4010e-01, - 7.9200e-01, 3.6788e-01, 2.6052e-02, 8.1153e-01, - 9.0067e-01, 4.6863e-01, 6.3271e-01, 2.4893e-01, - 1.4957e-01, 7.2187e-01, 3.2709e-01, 9.9022e-02, - 6.5922e-01, 5.9406e-01, 3.3545e-01, 9.9067e-02, - 3.3824e-01, 9.2845e-01, 5.0255e-02, 2.2726e-01, - 3.4047e-01, 1.9861e-01, 1.4552e-01, 8.7986e-01, - 9.7482e-01, 4.1268e-01, 5.3799e-01, 5.9400e-01, - 7.4292e-01, 9.3075e-01, 
3.9928e-01, 4.1655e-01, - 4.2639e-01, 7.7034e-01, 8.0540e-01, 3.1683e-01, - 2.3456e-01, 8.4488e-01, 3.7461e-01, 1.9023e-01, - 3.2398e-01, 6.2931e-01, 7.9000e-01, 2.9367e-01, - 6.8792e-01, 2.5211e-01, 6.9874e-01, 6.6391e-02, - 3.9713e-01, 6.5888e-01, 9.5501e-01, 5.4415e-01, - 5.5479e-01, 1.8311e-01, 3.5347e-01, 1.1535e-01, - 8.4727e-02, 3.7156e-02, 2.1737e-01, 3.9178e-01, - 4.3687e-01, 5.7922e-01, 7.1261e-01, 9.3706e-01, - 3.1458e-01, 5.7745e-01, 9.5299e-01, 5.8225e-01, - 6.4173e-02, 6.8551e-01, 9.5133e-01, 9.7228e-01, - 1.2820e-01, 1.3996e-01, 9.9997e-01, 9.2530e-01, - 8.5804e-01, 3.2017e-01, 1.4546e-01, 5.6185e-01, - 7.3055e-01, 5.3408e-01, 6.1432e-01, 2.1717e-01, - 8.6165e-01, 1.0379e-01, 6.0844e-01, 6.0791e-01, - 9.6595e-01, 3.1142e-01, 8.5742e-01, 1.0143e-01, - 2.0557e-01, 2.5530e-02, 5.2649e-01, 2.9851e-01, - 3.0730e-01, 3.2053e-01, 5.2734e-01, 8.8573e-01, - 4.1562e-01, 8.5644e-01, 1.1528e-01, 4.7387e-01, - 3.6684e-01, 6.5894e-01, 5.6719e-01, 6.4520e-01, - 2.5761e-01, 9.5989e-01, 7.8963e-01, 6.8385e-01, - 7.8112e-01, 6.4983e-01, 7.3846e-02, 9.9274e-01, - 7.8711e-01, 2.1382e-01, 6.0371e-01, 6.0743e-01, - 5.6459e-01, 6.4106e-02, 3.6125e-01, 8.0174e-01, - 8.1660e-01, 5.0011e-02, 3.1920e-01, 6.8889e-01, - 5.1944e-01, 7.9207e-01, 9.5144e-01, 3.0550e-01, - 8.9033e-01, 1.1908e-01, 4.7515e-01, 7.3463e-02, - 8.5474e-01, 7.5163e-01, 4.4201e-01, 8.9693e-01, - 8.0204e-01, 9.5888e-01, 3.0000e-01, 7.3653e-01, - 2.4941e-01, 9.0498e-01, 4.0852e-01, 9.1876e-01, - 3.2672e-01, 2.0116e-01, 9.6205e-01, 1.3782e-01, - 1.8651e-01, 4.1469e-01, 9.9287e-01, 3.3236e-01, - 8.4546e-01, 4.9708e-01, 6.1721e-01, 1.3183e-02, - 1.1176e-01, 7.7549e-01, 9.1833e-01, 8.2602e-01, - 8.8564e-01, 8.1683e-01, 9.4631e-01, 5.3871e-01, - 9.2475e-01, 7.5219e-01, 5.5456e-01, 5.8932e-01, - 6.0369e-01, 8.7860e-01, 7.8732e-01, 3.3553e-01, - 1.4227e-01, 7.2320e-02, 9.1130e-01, 2.7576e-01, - 1.9227e-01, 5.8261e-01, 5.7597e-01, 5.2942e-01, - 8.0195e-01, 7.5532e-01, 7.3289e-01, 7.3481e-01, - 1.5441e-01, 2.6307e-01, 3.8647e-01, 4.7675e-01, - 5.4830e-01, 3.3849e-01, 2.7906e-01, 2.0740e-02, - 5.7538e-01, 1.6988e-01, 5.3493e-01, 6.4440e-01, - 4.3749e-01, 1.6581e-01, 3.9710e-02, 2.9556e-01, - 2.7617e-01, 6.3054e-01, 4.6486e-01, 2.3433e-01, - 9.4185e-01, 2.6274e-01, 8.2593e-02, 9.5403e-01, - 7.0567e-01, 3.2809e-01, 6.7833e-01, 6.0174e-01, - 9.4228e-01, 8.9392e-01, 7.5028e-01, 5.3536e-01, - 8.1596e-01, 7.2865e-02, 7.6011e-02, 6.6139e-02, - 8.9616e-01, 3.0205e-01, 2.0969e-01, 7.2103e-01, - 1.4867e-01, 3.5570e-01, 9.6596e-02, 7.1176e-01, - 3.2679e-01, 6.5342e-04, 6.5438e-01, 3.4126e-01, - 4.7895e-01, 6.8964e-01, 6.8851e-01, 1.1205e-02, - 4.9141e-01, 3.0960e-01, 6.7523e-01, 3.4067e-01, - 6.0303e-01, 7.5867e-02, 5.9892e-01, 1.5932e-01, - 3.8413e-01, 7.0884e-01, 1.1522e-01, 8.3082e-01, - 9.2526e-01, 6.1693e-01, 1.2340e-01, 7.9432e-01, - 1.2029e-01, 1.5355e-01, 8.0700e-01, 1.8975e-01, - 2.8623e-01, 3.2111e-01, 7.9519e-01, 9.2948e-01, - 4.0853e-01, 4.7139e-02, 3.8241e-02, 1.6226e-01, - 1.4393e-01, 6.6874e-02, 9.4315e-01, 6.3129e-01, - 7.4798e-01, 4.7261e-03, 5.4673e-01, 8.9635e-02, - 8.8321e-02, 8.3158e-01, 5.7913e-01, 1.7580e-01, - 9.4173e-01, 5.0281e-01, 2.1334e-01, 2.4799e-01, - 8.1771e-01, 9.2260e-01, 3.2236e-01, 4.1922e-01, - 6.3245e-02, 2.7162e-01, 7.0913e-01, 4.4175e-01, - 8.4221e-01, 9.1147e-01, 5.9522e-01, 2.1069e-01, - 9.7237e-01, 4.6063e-01, 6.0893e-01, 9.1593e-01, - 5.5942e-01, 8.9949e-01, 1.5959e-01, 5.1028e-01, - 5.8067e-01, 5.0040e-01, 7.5637e-01, 5.0051e-01, - 1.3529e-01, 1.6889e-01, 3.7566e-01, 7.6014e-01, - 6.9943e-01, 4.3676e-02, 
4.9197e-01, 7.5237e-01, - 5.2776e-01, 8.1006e-01, 4.0253e-01, 1.3341e-01, - 5.7251e-01, 8.5232e-01, 1.3314e-01, 4.7442e-01, - 1.0458e-01, 7.8215e-01, 8.6254e-01, 7.8172e-01, - 7.5658e-01, 3.7316e-01, 4.9245e-01, 2.0250e-01, - 2.9283e-01, 4.9418e-01, 8.3756e-01, 7.9447e-01, - 2.5860e-02, 8.6507e-01, 3.3105e-01, 7.4279e-01, - 6.9986e-01, 2.4197e-01, 8.4334e-01, 6.3321e-02, - 1.5450e-01, 3.6323e-01, 9.1984e-01, 2.5840e-01, - 6.8977e-01, 9.3757e-01, 3.4717e-01, 4.3387e-01, - 1.1786e-01, 9.0562e-01, 7.6327e-02, 9.9274e-01, - 9.3539e-01, 3.4387e-01, 7.6361e-01, 1.6568e-01, - 9.7903e-01, 2.7082e-01, 1.0756e-01, 7.6346e-01, - 7.1179e-01, 7.6705e-01, 6.9893e-01, 4.3779e-01, - 6.1936e-01, 1.2852e-01, 7.0319e-01, 4.1450e-01, - 9.3806e-01, 9.4660e-01, 5.1220e-01, 6.6328e-01, - 1.6919e-01, 1.5555e-01, 1.4095e-02, 5.4087e-01, - 1.9144e-01, 4.5731e-01, 6.9006e-01, 4.7231e-01, - 1.8945e-01, 9.9883e-01, 4.1069e-01, 2.3659e-01, - 9.8239e-01, 3.1741e-01, 7.2376e-01, 8.4620e-01, - 6.8221e-01, 4.3839e-01, 8.3264e-01, 3.0102e-01]), + col_indices=tensor([4116, 7192, 1414, 602, 9261, 9755, 3418, 3677, 1346, + 4915, 1923, 5999, 8929, 3632, 514, 7579, 9728, 993, + 4226, 2729, 2969, 7063, 8946, 3199, 2641, 3551, 3369, + 5419, 1831, 1652, 6779, 7428, 3773, 5376, 162, 579, + 7703, 6315, 199, 8043, 3670, 9337, 2098, 2118, 8554, + 2706, 4081, 7007, 1627, 5281, 2169, 7536, 2244, 9570, + 3079, 5784, 1151, 8783, 1389, 8630, 6457, 6608, 4618, + 9063, 5053, 6181, 9948, 5748, 552, 4335, 6638, 3245, + 5740, 6165, 6638, 3389, 4075, 7308, 3538, 1808, 7667, + 6538, 3469, 3661, 4798, 9461, 4545, 9042, 8936, 6823, + 3214, 2364, 8082, 6264, 8924, 2858, 8926, 6581, 6873, + 4238, 1490, 2662, 5578, 4356, 3367, 3328, 2236, 5544, + 9846, 3138, 7106, 9710, 3457, 1720, 9664, 7549, 5930, + 186, 2220, 5945, 7396, 6063, 9045, 7005, 4888, 8764, + 4115, 6721, 9214, 1263, 9026, 6945, 128, 3992, 9195, + 1474, 898, 6799, 6691, 3804, 4100, 3045, 4269, 7083, + 7634, 6955, 7626, 3398, 9853, 5960, 6034, 6094, 1156, + 179, 4425, 7472, 1643, 3634, 5583, 4054, 7495, 8640, + 7830, 4823, 8512, 2700, 1579, 955, 2712, 5607, 4060, + 5276, 5257, 5785, 8404, 113, 7405, 3608, 3433, 3557, + 6849, 1586, 7863, 4221, 9310, 989, 7433, 3566, 8151, + 6016, 311, 1505, 3718, 1327, 7984, 3830, 167, 458, + 1529, 4687, 5720, 3066, 2390, 9640, 22, 1812, 4977, + 5871, 208, 5078, 2553, 6038, 7129, 4310, 282, 7188, + 2629, 1264, 6276, 1900, 4410, 2767, 3442, 8825, 8207, + 9184, 696, 4991, 9290, 793, 7381, 5472, 9417, 843, + 7337, 5992, 4392, 6955, 6527, 4732, 1836, 2887, 3302, + 1246, 4380, 7016, 2283, 2360, 339, 4662, 7184, 8502, + 5643, 5095, 1512, 8433, 2939, 1784, 9937, 1906, 9377, + 4250, 5487, 5234, 4792, 8152, 7183, 2215, 7833, 3347, + 5213, 732, 5590, 2763, 4277, 7806, 3532, 1529, 9333, + 4668, 9843, 4597, 3627, 8649, 8703, 873, 4071, 3905, + 7625, 2889, 4631, 9159, 8732, 933, 5436, 4772, 8351, + 1495, 1952, 2186, 1108, 4949, 2677, 1933, 3185, 4340, + 5683, 5723, 8381, 9022, 2370, 9966, 5403, 9733, 2198, + 1637, 6841, 4735, 9117, 8560, 917, 3319, 1371, 5395, + 9003, 4729, 166, 4382, 5364, 8859, 5190, 7650, 5699, + 8419, 9352, 522, 3927, 2679, 4262, 9505, 590, 7467, + 8458, 9407, 3144, 2243, 9605, 8976, 1644, 2029, 7253, + 258, 2996, 9166, 6182, 3685, 448, 388, 9009, 1003, + 8225, 5809, 5225, 5068, 7864, 1148, 1681, 8631, 9776, + 2434, 9005, 570, 3704, 421, 1541, 2997, 1138, 443, + 8928, 4642, 2073, 2012, 8211, 4813, 5727, 2486, 2677, + 2787, 9420, 1109, 597, 365, 1642, 876, 5405, 9802, + 7033, 1434, 7253, 4790, 1231, 1111, 5807, 2216, 8736, + 4015, 4599, 8996, 3935, 
942, 449, 9751, 9186, 7778, + 1387, 8588, 7679, 2605, 4391, 4798, 2000, 5117, 3414, + 1861, 49, 3127, 3681, 1232, 3277, 4285, 4110, 2170, + 1131, 3609, 305, 4308, 1078, 3550, 8680, 9527, 3657, + 5580, 3942, 7339, 4571, 6030, 3953, 2251, 4453, 7552, + 1958, 1501, 1056, 6071, 8794, 5497, 2466, 5840, 5755, + 4350, 6652, 9676, 7696, 5668, 5716, 6802, 2756, 2844, + 2076, 9916, 4254, 4247, 1549, 6255, 136, 9745, 4708, + 8246, 5741, 3283, 9856, 3884, 7212, 2451, 4506, 3332, + 2802, 546, 1107, 6908, 7485, 6240, 3041, 3085, 3258, + 8550, 9847, 4893, 2038, 9539, 625, 7440, 4196, 5182, + 3039, 1354, 4896, 920, 8631, 5555, 3241, 4037, 7927, + 7978, 2714, 4090, 1155, 2312, 8957, 8647, 1625, 2284, + 5120, 2713, 6756, 1447, 5297, 9292, 191, 9719, 512, + 3285, 1008, 1631, 2282, 1, 8384, 4959, 4082, 6943, + 5026, 1284, 2559, 7835, 8876, 7406, 9259, 6431, 2124, + 5249, 886, 3877, 9616, 7440, 7935, 2953, 4353, 8630, + 6873, 6450, 7974, 2907, 6853, 1731, 1734, 7915, 627, + 1848, 9651, 8072, 325, 9090, 3952, 2190, 6969, 5724, + 4737, 4971, 5930, 9065, 795, 2734, 7142, 1334, 1101, + 6976, 2804, 9022, 4770, 8850, 2321, 2915, 4870, 317, + 200, 1907, 4202, 8058, 468, 2452, 3052, 4111, 8780, + 2827, 6426, 5414, 9074, 2589, 6068, 209, 6819, 6948, + 7804, 7353, 5400, 8311, 624, 4794, 4250, 9150, 7304, + 7383, 244, 3241, 8673, 824, 8833, 5120, 347, 3945, + 9530, 4501, 8187, 5682, 9083, 4685, 9255, 3954, 6003, + 1181, 7405, 4909, 2035, 5243, 7589, 7209, 2811, 8638, + 6620, 998, 4062, 4859, 2372, 2608, 3632, 9769, 7028, + 4263, 7696, 6057, 9107, 4050, 223, 2225, 9966, 6759, + 8412, 3222, 9059, 9449, 3803, 5159, 6770, 4309, 803, + 3956, 6109, 2635, 8651, 9493, 6196, 8647, 3760, 7476, + 2703, 4996, 7071, 454, 6200, 1008, 6621, 5370, 4441, + 9279, 2513, 9418, 7493, 7526, 4237, 4698, 7251, 6897, + 2020, 7731, 9984, 3559, 4926, 9283, 2622, 1954, 1488, + 3911, 7884, 2789, 6162, 5229, 4168, 282, 389, 5316, + 1498, 4458, 3561, 8943, 9282, 8236, 2217, 2559, 1819, + 1350, 706, 7116, 9065, 8837, 4066, 7679, 6388, 3799, + 9827, 4686, 9282, 6788, 6841, 1373, 7434, 8130, 3964, + 8166, 3594, 3817, 6355, 3458, 2124, 3724, 9463, 6106, + 9252, 7691, 2416, 3575, 1616, 9144, 6977, 3531, 6725, + 6763, 1340, 5794, 8225, 5653, 9046, 9095, 7748, 6148, + 7023, 3605, 4097, 7017, 1954, 2458, 616, 3576, 4392, + 1508, 4668, 9858, 6665, 8198, 7943, 9561, 6812, 4402, + 7065, 911, 6654, 5206, 2290, 2647, 2858, 4004, 7033, + 1456, 4004, 5009, 7018, 441, 1434, 616, 9192, 4014, + 4224, 3305, 5685, 5038, 1879, 2513, 2975, 7217, 6734, + 4346, 2451, 4252, 1359, 1897, 1558, 1518, 8946, 1045, + 2687, 5779, 3825, 7875, 7903, 3483, 3272, 5342, 4112, + 6056, 4149, 9046, 6331, 5493, 5740, 4356, 9840, 2344, + 8131, 1983, 3832, 1085, 6587, 7944, 5354, 9757, 9494, + 6772, 4253, 3448, 1844, 3828, 8186, 561, 2149, 277, + 7639, 7736, 9923, 874, 4782, 7514, 9258, 25, 3290, + 9135, 821, 7578, 3691, 6954, 4378, 106, 3717, 4341, + 7878, 4081, 3382, 2833, 5381, 1369, 1198, 8238, 509, + 8127, 7888, 3507, 99, 1906, 6992, 375, 2939, 9900, + 3554, 9933, 4200, 9872, 7049, 4972, 7060, 9575, 558, + 4822, 5894, 5066, 5665, 2353, 8349, 8831, 6355, 6847, + 4671, 7517, 5761, 7668, 862, 525, 7306, 1582, 5763, + 7813, 8159, 6461, 9837, 487, 5618, 551, 6758, 6315, + 7722, 5312, 2191, 6095, 5151, 3304, 6873, 8430, 6063, + 9471, 4395, 9467, 7290, 1009, 2539, 6217, 7375, 2928, + 8340, 8978, 9118, 6252, 7166, 2776, 341, 5357, 7950, + 2900, 8133, 6787, 2966, 2483, 5175, 1262, 2317, 2466, + 7698, 6866, 558, 1985, 7535, 6044, 3673, 7654, 8927, + 6144, 8580, 3529, 9842, 2172, 4769, 2277, 2207, 
1695, + 234, 4499, 3795, 4016, 695, 3395, 6723, 2800, 2814, + 7062]), + values=tensor([1.3767e-01, 7.1628e-01, 8.6135e-01, 7.1511e-01, + 9.5089e-01, 2.4757e-01, 9.3760e-03, 7.7773e-01, + 4.0669e-01, 9.1069e-01, 1.2601e-01, 5.4040e-01, + 3.7319e-02, 3.2172e-01, 9.1567e-01, 8.9101e-01, + 2.2712e-02, 6.3215e-01, 1.8351e-01, 5.7793e-01, + 6.9901e-01, 5.3678e-01, 4.7677e-01, 5.0165e-02, + 5.6649e-01, 6.3104e-01, 3.0293e-01, 7.4977e-01, + 7.3472e-02, 2.8601e-01, 5.5054e-01, 3.5579e-01, + 3.6622e-01, 6.5500e-01, 6.2994e-01, 3.6452e-01, + 2.0704e-01, 1.0247e-01, 2.5826e-01, 5.2540e-02, + 7.3130e-02, 6.3376e-01, 5.9810e-01, 4.9090e-01, + 6.8296e-02, 5.3917e-01, 1.1966e-01, 1.8500e-01, + 3.0815e-01, 5.8388e-01, 4.2982e-01, 5.6405e-02, + 4.8100e-01, 2.6901e-01, 3.0648e-03, 7.4291e-01, + 1.2739e-01, 3.2902e-01, 7.3718e-01, 7.4258e-01, + 9.6792e-01, 2.1717e-01, 2.0890e-01, 5.2258e-01, + 6.8834e-01, 3.6326e-01, 6.9665e-01, 6.8825e-01, + 6.2143e-01, 6.2623e-01, 6.1424e-01, 7.0805e-01, + 4.0377e-01, 5.0286e-01, 3.1606e-01, 1.0216e-01, + 8.7212e-01, 2.5393e-01, 4.3722e-01, 4.5099e-01, + 6.2296e-01, 6.7718e-01, 1.1725e-01, 7.5117e-01, + 6.1649e-01, 2.0645e-01, 1.5527e-01, 5.8584e-01, + 6.5559e-01, 9.7418e-01, 5.7488e-01, 4.5016e-01, + 1.0108e-02, 7.8700e-01, 8.5952e-01, 3.6767e-01, + 6.6708e-01, 3.9792e-01, 2.1940e-01, 4.4322e-01, + 7.0303e-01, 3.4139e-01, 3.9259e-01, 8.8211e-01, + 8.8687e-01, 7.0191e-01, 4.6733e-01, 4.5169e-01, + 1.3884e-01, 7.2389e-01, 1.0696e-01, 1.9507e-01, + 5.4738e-01, 6.7276e-01, 9.7900e-02, 5.0607e-01, + 6.0787e-01, 3.7552e-01, 6.9448e-01, 9.9517e-01, + 6.9703e-01, 4.5115e-01, 2.7976e-01, 3.3528e-01, + 6.2057e-01, 8.6211e-01, 7.0540e-01, 7.0073e-01, + 7.5910e-01, 7.0805e-01, 6.8043e-01, 9.2129e-01, + 3.8606e-02, 8.1173e-01, 6.8589e-01, 8.8272e-01, + 8.5540e-01, 8.5966e-01, 2.5725e-01, 6.2432e-02, + 2.0565e-01, 7.6464e-01, 6.2149e-01, 7.0689e-01, + 9.6943e-01, 9.1375e-01, 6.9609e-01, 1.0540e-01, + 9.0816e-01, 1.5889e-01, 5.7258e-01, 5.5242e-01, + 4.2908e-01, 4.4953e-01, 6.5806e-01, 8.4537e-01, + 8.1500e-02, 5.0051e-01, 6.5699e-01, 1.6438e-01, + 1.5049e-02, 4.9899e-01, 3.9120e-01, 3.2732e-01, + 3.8565e-01, 8.2901e-01, 4.0085e-01, 6.0713e-01, + 7.0000e-01, 3.0412e-01, 6.0281e-01, 2.5542e-02, + 4.3786e-02, 1.8718e-01, 8.7005e-01, 9.4782e-01, + 5.9295e-01, 7.8689e-03, 8.5626e-01, 6.4375e-01, + 3.2275e-01, 4.2197e-01, 3.6112e-01, 6.2448e-02, + 9.7853e-01, 8.6224e-01, 7.4828e-01, 5.8071e-01, + 8.7233e-01, 5.0843e-01, 2.8581e-01, 6.4133e-01, + 7.0918e-01, 9.5194e-01, 6.1273e-01, 8.5849e-01, + 4.9317e-01, 5.7639e-02, 9.9630e-01, 3.7211e-01, + 3.4682e-01, 1.1154e-01, 2.5831e-03, 6.3374e-01, + 6.6874e-01, 3.9360e-01, 3.5838e-01, 8.5471e-01, + 7.0322e-01, 8.2311e-01, 5.4150e-01, 5.7026e-01, + 6.5821e-01, 1.3975e-01, 7.7260e-02, 1.1189e-01, + 5.2080e-01, 1.2715e-02, 6.9032e-01, 7.5638e-01, + 2.0518e-01, 5.5927e-01, 8.3030e-02, 5.5062e-01, + 3.2127e-01, 1.0789e-01, 9.1260e-01, 6.4350e-01, + 9.9466e-01, 5.5812e-01, 2.5897e-01, 1.4537e-01, + 8.8427e-01, 2.4431e-01, 5.3467e-01, 1.1437e-01, + 2.2271e-01, 3.6377e-01, 5.0688e-01, 4.0210e-01, + 9.6214e-02, 7.4145e-01, 4.1409e-01, 2.7186e-01, + 6.8016e-01, 3.4039e-01, 5.5569e-01, 9.7247e-02, + 6.0054e-01, 9.7719e-01, 1.3649e-01, 9.9753e-01, + 8.3259e-01, 6.4755e-01, 3.9749e-01, 9.0807e-01, + 2.1147e-01, 5.0004e-01, 5.3220e-02, 3.1762e-01, + 3.3833e-01, 7.7938e-01, 6.2849e-01, 4.7457e-01, + 1.7438e-02, 8.1308e-01, 2.8307e-01, 6.7284e-01, + 5.1861e-01, 3.3344e-01, 9.1361e-01, 6.0753e-01, + 7.0133e-01, 3.6039e-01, 5.5368e-02, 9.3108e-01, + 3.5189e-01, 
9.7164e-01, 3.9002e-01, 5.2678e-01, + 2.8953e-03, 9.8500e-01, 7.3340e-01, 9.2830e-01, + 4.0230e-01, 7.7052e-01, 9.5800e-02, 3.1615e-01, + 5.9244e-01, 9.0208e-01, 1.2767e-01, 1.5811e-01, + 8.0315e-01, 6.2049e-01, 3.9867e-01, 5.8587e-01, + 5.4263e-01, 5.8712e-01, 6.4417e-01, 2.5094e-01, + 5.7318e-03, 4.2910e-01, 4.9714e-01, 1.2439e-01, + 6.8645e-01, 2.7117e-01, 9.4144e-01, 1.0741e-01, + 6.0964e-01, 4.9124e-01, 1.2215e-01, 6.2184e-01, + 9.5970e-01, 1.4042e-01, 3.8926e-01, 2.5587e-01, + 2.3181e-01, 1.5447e-01, 2.6754e-01, 2.8396e-01, + 4.9732e-01, 7.7233e-01, 7.2055e-01, 2.6804e-01, + 5.2721e-01, 2.8004e-02, 4.0875e-01, 1.0355e-01, + 3.8021e-01, 1.6312e-01, 9.3351e-01, 4.0357e-01, + 9.6564e-01, 9.5352e-01, 2.3033e-01, 1.8023e-01, + 8.9340e-01, 3.6181e-01, 5.5393e-01, 3.3373e-01, + 6.1869e-01, 1.4247e-01, 3.8655e-02, 1.5970e-01, + 1.5518e-01, 8.9244e-01, 2.1298e-01, 7.1235e-01, + 5.5568e-01, 5.8432e-01, 1.9164e-01, 2.3690e-01, + 9.5455e-01, 7.2857e-01, 3.7214e-01, 5.5532e-01, + 8.8376e-01, 5.9028e-01, 2.0300e-01, 6.3695e-01, + 3.3213e-01, 9.5342e-02, 7.7225e-01, 6.6953e-02, + 1.6999e-01, 9.4302e-01, 2.6314e-01, 3.3727e-01, + 2.7787e-01, 8.0225e-01, 6.5075e-01, 7.4035e-01, + 5.3106e-01, 5.4790e-01, 3.1026e-01, 2.2004e-02, + 9.0580e-01, 7.6487e-01, 5.6201e-01, 1.1201e-01, + 1.5233e-02, 3.8437e-01, 6.1598e-02, 3.1117e-01, + 3.2700e-01, 8.9412e-01, 3.9616e-01, 5.3201e-01, + 6.4656e-01, 2.1046e-01, 3.6325e-01, 9.2194e-01, + 6.3959e-01, 3.9911e-01, 4.4881e-01, 1.0553e-01, + 2.5978e-01, 1.8512e-01, 3.9395e-01, 5.4669e-02, + 6.8945e-01, 8.0965e-01, 3.2951e-01, 5.4811e-01, + 3.0211e-02, 6.8181e-01, 3.2372e-01, 9.3662e-01, + 6.8499e-01, 6.4111e-01, 5.8163e-01, 3.5869e-01, + 2.8411e-01, 4.3117e-01, 1.3450e-01, 4.8766e-01, + 7.0478e-01, 5.3516e-01, 6.7174e-01, 8.0094e-01, + 8.1062e-01, 2.7839e-02, 8.6884e-01, 7.9628e-01, + 1.2090e-01, 9.4127e-02, 2.6206e-01, 7.1621e-02, + 9.6549e-01, 9.9012e-02, 8.1580e-01, 6.2717e-01, + 3.7858e-02, 6.6910e-01, 1.1367e-01, 4.4587e-01, + 2.1667e-01, 6.2027e-01, 6.0694e-01, 4.8314e-01, + 8.9715e-01, 1.2495e-01, 4.0892e-01, 5.4664e-01, + 2.0868e-01, 3.7281e-01, 7.2776e-01, 3.9380e-01, + 7.7841e-01, 7.2770e-01, 5.5081e-01, 4.5550e-01, + 1.1337e-01, 6.0070e-01, 1.7006e-01, 8.9155e-01, + 3.4892e-04, 8.9357e-01, 9.6087e-01, 3.6593e-02, + 5.8737e-01, 3.6900e-02, 7.9887e-01, 6.1905e-01, + 9.3419e-01, 1.0042e-01, 7.9304e-01, 5.8615e-01, + 7.2710e-01, 5.8139e-01, 2.5740e-02, 1.1614e-01, + 8.0492e-01, 1.9472e-01, 6.4306e-01, 7.9291e-01, + 8.8424e-01, 2.4042e-01, 6.6614e-01, 9.9409e-01, + 3.9403e-01, 4.6506e-01, 4.8211e-01, 5.6363e-01, + 6.0078e-01, 8.3139e-02, 7.8203e-01, 4.7756e-01, + 8.5709e-01, 5.7873e-01, 7.6714e-02, 3.9206e-01, + 6.8500e-01, 9.6940e-01, 2.5652e-01, 8.8082e-01, + 3.7856e-01, 9.2438e-01, 4.7830e-01, 1.4020e-01, + 5.2884e-01, 3.1246e-01, 6.5357e-01, 3.5125e-01, + 8.1007e-01, 7.6288e-01, 4.0573e-01, 2.6397e-01, + 7.7222e-01, 4.6844e-01, 6.6931e-01, 3.7442e-02, + 7.3587e-01, 1.7894e-01, 1.9269e-01, 5.3208e-02, + 1.7345e-01, 3.2373e-01, 8.6054e-01, 9.6058e-01, + 3.3684e-01, 1.2915e-01, 3.7259e-01, 8.5330e-02, + 8.7195e-01, 4.1642e-01, 1.4156e-02, 5.6366e-01, + 2.0687e-02, 4.5670e-01, 9.4825e-01, 4.6193e-01, + 9.6608e-01, 7.2678e-01, 1.0354e-01, 4.0029e-01, + 8.7647e-01, 7.6570e-01, 2.0229e-01, 2.0324e-01, + 5.5069e-01, 9.8105e-01, 3.4961e-01, 9.6117e-01, + 1.2299e-01, 8.9272e-01, 8.4714e-01, 8.7303e-01, + 8.8032e-01, 6.5685e-01, 5.8322e-01, 5.8733e-01, + 2.6798e-01, 6.5585e-01, 4.5400e-01, 5.2309e-03, + 9.9176e-01, 9.0773e-01, 9.4229e-01, 8.4914e-01, + 1.9365e-01, 
5.8394e-01, 9.0261e-01, 5.1355e-01, + 4.1587e-01, 2.4801e-01, 2.7669e-01, 5.5044e-01, + 2.4672e-01, 4.0436e-01, 2.4099e-01, 5.0330e-01, + 9.5869e-01, 7.6414e-01, 1.2318e-01, 8.6587e-01, + 7.4214e-01, 6.6104e-03, 1.1094e-01, 4.0845e-01, + 1.1767e-01, 9.0883e-01, 7.2050e-01, 9.7280e-01, + 9.8324e-01, 1.3704e-01, 4.0185e-01, 3.4497e-01, + 7.4353e-01, 9.0031e-01, 2.0494e-01, 2.5380e-01, + 5.2888e-01, 2.1441e-02, 4.0249e-01, 5.3610e-01, + 5.2276e-01, 3.3112e-01, 2.3875e-02, 9.2410e-01, + 5.6142e-02, 8.6019e-01, 3.4566e-01, 5.7521e-01, + 3.8381e-01, 2.0229e-01, 5.4861e-01, 2.1478e-01, + 3.3400e-01, 2.2956e-01, 3.0797e-01, 5.1339e-01, + 4.1802e-01, 1.9688e-01, 6.2325e-01, 3.2738e-01, + 4.8917e-01, 3.2538e-01, 5.1070e-01, 7.4161e-01, + 7.0588e-01, 3.9899e-01, 5.2221e-01, 1.5384e-01, + 3.5179e-01, 4.8115e-01, 2.7363e-01, 2.8002e-01, + 4.2415e-01, 2.1545e-01, 7.6635e-01, 4.4319e-01, + 1.0434e-01, 5.3882e-01, 1.7751e-01, 9.7206e-01, + 5.0674e-01, 3.4560e-01, 3.2579e-01, 5.4960e-01, + 3.2111e-01, 9.7020e-01, 8.9102e-01, 3.3955e-01, + 7.4355e-01, 1.1853e-01, 7.3761e-01, 5.9870e-01, + 6.3084e-01, 5.2364e-01, 6.9109e-01, 9.7319e-01, + 4.4327e-01, 6.7270e-01, 5.0866e-01, 5.8518e-02, + 8.6936e-01, 2.0647e-01, 7.9599e-01, 8.3871e-01, + 8.4882e-01, 4.8923e-01, 9.0385e-01, 5.4210e-01, + 8.2097e-02, 4.4715e-01, 3.9892e-02, 9.7745e-01, + 2.1047e-02, 7.0280e-01, 1.6161e-01, 5.8636e-01, + 5.1251e-02, 3.7167e-01, 6.3274e-01, 6.2298e-02, + 7.4293e-01, 7.9914e-01, 8.7203e-01, 9.6482e-01, + 5.6499e-01, 3.8671e-01, 7.0059e-01, 7.3199e-01, + 9.2567e-01, 7.3812e-01, 1.6861e-01, 7.4046e-01, + 8.1607e-01, 4.6992e-01, 7.2711e-01, 9.1487e-01, + 4.1156e-01, 5.1202e-01, 3.5986e-01, 3.3062e-01, + 2.9725e-01, 1.6921e-01, 5.4909e-01, 1.2069e-01, + 3.5195e-01, 7.6383e-01, 7.4288e-01, 2.4758e-01, + 3.8444e-01, 5.8266e-02, 2.4042e-01, 3.0919e-01, + 1.7204e-01, 6.1885e-01, 2.9065e-01, 1.3719e-02, + 4.0827e-01, 1.3647e-01, 1.4959e-02, 9.9494e-01, + 5.1923e-01, 5.6432e-01, 9.4876e-01, 5.7473e-01, + 9.7697e-01, 1.8048e-02, 5.6856e-01, 1.5324e-01, + 1.0664e-01, 5.3795e-01, 7.1911e-01, 1.5088e-01, + 6.3302e-01, 1.3568e-02, 5.4872e-01, 9.1526e-01, + 9.6402e-01, 6.2534e-01, 7.4570e-01, 5.7812e-01, + 3.7302e-01, 2.3742e-01, 4.2409e-02, 4.5568e-01, + 5.1109e-01, 4.5050e-01, 6.3292e-01, 8.9476e-01, + 7.5507e-01, 6.3026e-01, 4.1196e-01, 4.5580e-01, + 5.6006e-01, 6.5274e-01, 9.8682e-01, 3.8258e-01, + 9.0774e-01, 5.1524e-01, 3.1276e-01, 8.3507e-01, + 3.6866e-01, 8.7038e-01, 1.8687e-01, 8.7445e-01, + 3.4415e-01, 2.8866e-01, 2.8929e-01, 1.2231e-01, + 9.4505e-01, 4.2536e-01, 6.2981e-01, 4.8049e-01, + 2.0686e-01, 1.7140e-01, 8.2123e-01, 5.0642e-01, + 9.1534e-01, 5.8595e-01, 7.0950e-01, 3.9363e-01, + 1.4897e-01, 8.5593e-01, 3.9573e-01, 4.2217e-01, + 1.3321e-01, 8.2831e-01, 3.5401e-01, 8.5123e-01, + 9.7129e-01, 3.5101e-01, 2.9517e-01, 7.8648e-01, + 1.9865e-01, 7.6650e-01, 8.6624e-01, 6.6334e-01, + 5.3911e-02, 2.1179e-01, 7.9277e-01, 3.3075e-01, + 9.7451e-02, 2.2374e-01, 6.0424e-01, 3.4120e-01, + 1.6678e-01, 7.3058e-01, 1.4131e-01, 3.2380e-01, + 8.0025e-02, 8.7760e-02, 2.1083e-01, 2.1844e-01, + 6.6954e-02, 4.9328e-01, 7.2243e-01, 8.3534e-01, + 9.9053e-01, 2.3837e-01, 1.9357e-01, 5.5049e-01, + 6.7173e-01, 6.2973e-03, 2.5163e-01, 4.1258e-01, + 6.6680e-01, 5.0225e-01, 6.5658e-01, 7.9015e-01, + 4.6985e-01, 9.5341e-01, 1.8942e-01, 9.6505e-01, + 5.9884e-01, 7.6761e-01, 9.9066e-01, 1.7115e-01, + 1.5569e-01, 6.6473e-01, 4.0053e-01, 1.1972e-01, + 1.4245e-01, 7.4121e-01, 4.5033e-01, 7.0448e-01, + 8.4109e-01, 9.6275e-01, 5.6967e-01, 4.6629e-01, + 4.6346e-01, 
8.8916e-01, 5.7855e-01, 5.9218e-01, + 9.7444e-01, 5.1624e-01, 5.3685e-01, 3.2910e-01, + 4.5445e-03, 3.8540e-01, 9.5888e-01, 5.3370e-01, + 5.2745e-01, 7.0115e-01, 1.7582e-02, 7.0961e-01, + 4.8579e-01, 4.4872e-01, 1.8650e-01, 9.1635e-01, + 7.1700e-01, 7.1226e-01, 3.2373e-01, 9.7173e-01, + 9.5299e-01, 8.4083e-01, 2.5068e-01, 3.3961e-01, + 2.8141e-01, 1.6843e-01, 7.2851e-01, 3.5589e-01, + 4.7044e-01, 3.3914e-01, 9.9225e-01, 8.3744e-01, + 6.3322e-01, 2.6791e-01, 1.4790e-01, 8.5573e-01, + 8.6051e-03, 6.8401e-01, 8.5940e-01, 3.1405e-01, + 8.0239e-01, 5.3587e-01, 7.6761e-02, 1.9059e-01, + 5.5597e-01, 2.2680e-01, 5.0511e-01, 7.4720e-01, + 5.6044e-01, 1.2904e-01, 2.6998e-02, 2.6815e-02, + 1.4811e-01, 5.7710e-01, 3.8720e-01, 3.1164e-01, + 5.0377e-01, 6.6323e-01, 3.0251e-01, 7.6729e-01, + 4.0920e-01, 2.6721e-01, 4.7283e-01, 7.9623e-01, + 3.3513e-01, 9.2036e-01, 3.7500e-01, 1.7646e-01, + 4.9155e-01, 6.1185e-01, 7.4077e-01, 7.1092e-01, + 5.2796e-02, 7.3738e-01, 3.3700e-02, 5.8595e-01, + 7.1338e-01, 1.9473e-01, 7.7258e-01, 8.6642e-01, + 3.1804e-01, 9.2864e-01, 3.4002e-01, 5.8527e-01, + 3.2231e-01, 2.5907e-01, 2.4104e-02, 9.6474e-01, + 7.6358e-01, 2.8793e-01, 2.4963e-01, 3.2547e-03, + 3.7889e-01, 7.2848e-01, 7.8352e-01, 6.5869e-01, + 6.7556e-01, 9.9946e-01, 7.1099e-01, 7.8559e-01, + 2.4594e-01, 7.5866e-01, 2.1889e-01, 8.6192e-01, + 1.0492e-01, 6.6974e-01, 1.9110e-01, 4.2178e-01, + 5.2211e-01, 5.7968e-01, 4.3791e-01, 1.6162e-01, + 1.8223e-01, 4.0645e-01, 8.0597e-01, 1.8599e-01, + 8.5659e-01, 8.8819e-01, 4.3915e-01, 2.0250e-01, + 7.8476e-02, 6.6803e-01, 7.7438e-01, 2.0798e-02, + 6.5758e-01, 3.2100e-01, 3.5731e-04, 2.1289e-01, + 2.5481e-01, 6.2077e-01, 2.4535e-01, 8.8808e-01, + 3.0564e-01, 8.8046e-01, 1.5537e-01, 2.9485e-01, + 4.3426e-01, 1.5200e-01, 2.3927e-01, 4.1251e-02, + 2.6271e-01, 5.8274e-01, 5.6945e-02, 1.4017e-01, + 3.0720e-01, 3.6401e-01, 1.6795e-01, 6.9631e-02, + 6.9758e-01, 5.9161e-01, 8.3040e-01, 9.4507e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7176, 0.7209, 0.4421, ..., 0.7861, 0.0630, 0.0040]) +tensor([0.7781, 0.3926, 0.9305, ..., 0.7246, 0.2621, 0.2068]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1509,13 +1402,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.164389848709106 seconds +Time: 10.514646053314209 seconds -[20.48, 20.52, 20.52, 20.52, 20.2, 20.28, 20.28, 20.24, 20.08, 20.24] -[20.36, 20.52, 21.96, 22.92, 24.08, 24.08, 24.4, 25.08, 24.4, 23.76, 24.0, 23.72, 23.4] -13.186760663986206 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 141816, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.164389848709106, 'TIME_S_1KI': 0.07167308236524163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 285.17019953727726, 'W': 21.625492932171987} -[20.48, 20.52, 20.52, 20.52, 20.2, 20.28, 20.28, 20.24, 20.08, 20.24, 20.16, 20.28, 20.28, 20.28, 20.12, 20.12, 20.0, 20.12, 20.52, 20.8] -365.2 -18.259999999999998 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 141816, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.164389848709106, 'TIME_S_1KI': 0.07167308236524163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 285.17019953727726, 'W': 21.625492932171987, 'J_1KI': 2.010846445656888, 'W_1KI': 0.1524897961596152, 'W_D': 3.365492932171989, 
'J_D': 44.37994981288918, 'W_D_1KI': 0.023731405004879483, 'J_D_1KI': 0.00016733940461499043} +[21.04, 21.72, 22.28, 22.76, 23.44, 23.84, 24.4, 25.56, 26.08, 25.56] +[25.56, 25.8, 25.36, 25.76, 26.12, 26.96, 27.2, 26.52, 26.28, 25.72, 25.56, 25.04, 24.92, 24.92] +14.21888017654419 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 142926, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.514646053314209, 'TIME_S_1KI': 0.07356706304880994, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 341.9344939994813, 'W': 24.04792007204229} +[21.04, 21.72, 22.28, 22.76, 23.44, 23.84, 24.4, 25.56, 26.08, 25.56, 20.64, 20.76, 20.84, 20.68, 20.84, 20.92, 20.48, 20.28, 20.36, 20.6] +399.15999999999997 +19.958 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 142926, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.514646053314209, 'TIME_S_1KI': 0.07356706304880994, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 341.9344939994813, 'W': 24.04792007204229, 'J_1KI': 2.3923883268228403, 'W_1KI': 0.1682543419114947, 'W_D': 4.089920072042293, 'J_D': 58.15408343601235, 'W_D_1KI': 0.02861564776207473, 'J_D_1KI': 0.00020021303165326625} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json index cb5e414..510a9ba 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 39.37885141372681, "TIME_S_1KI": 39.37885141372681, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3668.398367080688, "W": 77.07751960335096, "J_1KI": 3668.398367080688, "W_1KI": 77.07751960335096, "W_D": 57.18851960335097, "J_D": 2721.8088102509973, "W_D_1KI": 57.18851960335097, "J_D_1KI": 57.18851960335097} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 37.050822496414185, "TIME_S_1KI": 37.050822496414185, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3160.1361892318723, "W": 76.88509213042873, "J_1KI": 3160.1361892318723, "W_1KI": 76.88509213042873, "W_D": 56.91509213042873, "J_D": 2339.3279161286355, "W_D_1KI": 56.91509213042873, "J_D_1KI": 56.91509213042873} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output index 56c2844..259a25f 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 500000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": 
"csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 39.37885141372681} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 37.050822496414185} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 2499992, - 2499996, 2500000]), - col_indices=tensor([ 4222, 120413, 177881, ..., 234997, 318812, - 370543]), - values=tensor([0.6429, 0.8175, 0.9231, ..., 0.8720, 0.9829, 0.6195]), +tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499993, + 2499998, 2500000]), + col_indices=tensor([159663, 166958, 205263, ..., 483859, 36662, + 138241]), + values=tensor([0.8479, 0.2779, 0.6227, ..., 0.0012, 0.5209, 0.2466]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7836, 0.9661, 0.9943, ..., 0.1995, 0.6325, 0.8613]) +tensor([0.9479, 0.6643, 0.5077, ..., 0.6908, 0.0479, 0.6658]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 39.37885141372681 seconds +Time: 37.050822496414185 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 2499992, - 2499996, 2500000]), - col_indices=tensor([ 4222, 120413, 177881, ..., 234997, 318812, - 370543]), - values=tensor([0.6429, 0.8175, 0.9231, ..., 0.8720, 0.9829, 0.6195]), +tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499993, + 2499998, 2500000]), + col_indices=tensor([159663, 166958, 205263, ..., 483859, 36662, + 138241]), + values=tensor([0.8479, 0.2779, 0.6227, ..., 0.0012, 0.5209, 0.2466]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7836, 0.9661, 0.9943, ..., 0.1995, 0.6325, 0.8613]) +tensor([0.9479, 0.6643, 0.5077, ..., 0.6908, 0.0479, 0.6658]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +35,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 39.37885141372681 seconds +Time: 37.050822496414185 seconds -[21.56, 21.8, 21.92, 21.8, 21.8, 21.72, 21.76, 21.96, 22.0, 22.0] -[22.12, 21.92, 21.96, 23.24, 24.64, 30.88, 42.28, 55.2, 68.48, 80.52, 87.0, 88.04, 90.88, 91.16, 92.4, 93.96, 93.96, 94.68, 93.52, 94.04, 94.08, 94.16, 93.04, 90.92, 89.84, 90.08, 89.76, 89.56, 91.4, 89.32, 89.44, 89.84, 89.84, 90.68, 89.76, 89.96, 89.52, 87.76, 87.84, 90.36, 91.92, 93.44, 92.0, 91.96, 92.36, 91.2] -47.59362244606018 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 39.37885141372681, 'TIME_S_1KI': 39.37885141372681, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3668.398367080688, 'W': 77.07751960335096} -[21.56, 21.8, 21.92, 21.8, 21.8, 21.72, 21.76, 21.96, 22.0, 22.0, 23.68, 23.6, 22.52, 21.8, 21.8, 21.84, 22.16, 22.32, 22.24, 22.24] -397.78 -19.889 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 39.37885141372681, 'TIME_S_1KI': 39.37885141372681, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3668.398367080688, 'W': 77.07751960335096, 'J_1KI': 3668.398367080688, 'W_1KI': 77.07751960335096, 'W_D': 57.18851960335097, 'J_D': 2721.8088102509973, 'W_D_1KI': 57.18851960335097, 'J_D_1KI': 57.18851960335097} +[22.44, 22.28, 22.0, 22.08, 22.44, 22.44, 22.24, 22.28, 22.28, 21.92] +[21.84, 21.84, 22.08, 23.32, 24.04, 37.0, 51.0, 68.72, 84.04, 93.2, 93.2, 96.84, 97.16, 96.0, 93.56, 93.56, 93.4, 92.48, 94.2, 94.4, 93.76, 94.28, 92.52, 92.4, 93.48, 93.48, 95.4, 93.6, 93.04, 91.68, 87.68, 87.08, 87.96, 88.4, 87.72, 87.2, 88.56, 88.0, 89.28, 89.12] +41.1020667552948 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 37.050822496414185, 'TIME_S_1KI': 37.050822496414185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3160.1361892318723, 'W': 76.88509213042873} +[22.44, 22.28, 22.0, 22.08, 22.44, 22.44, 22.24, 22.28, 22.28, 21.92, 21.72, 21.72, 21.8, 22.04, 22.2, 22.32, 22.48, 22.44, 22.24, 22.16] +399.4 +19.97 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 
'MATRIX_DENSITY': 1e-05, 'TIME_S': 37.050822496414185, 'TIME_S_1KI': 37.050822496414185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3160.1361892318723, 'W': 76.88509213042873, 'J_1KI': 3160.1361892318723, 'W_1KI': 76.88509213042873, 'W_D': 56.91509213042873, 'J_D': 2339.3279161286355, 'W_D_1KI': 56.91509213042873, 'J_D_1KI': 56.91509213042873} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json index 47ee4ee..372bd49 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1525, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.375513792037964, "TIME_S_1KI": 6.80361560133637, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1039.7556144714354, "W": 59.41096095809724, "J_1KI": 681.8069603091379, "W_1KI": 38.958007185637534, "W_D": 40.714960958097244, "J_D": 712.5555380096434, "W_D_1KI": 26.698335054489995, "J_D_1KI": 17.507104953763932} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.036840677261353, "TIME_S_1KI": 10.036840677261353, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 692.6639336013795, "W": 51.991207713568286, "J_1KI": 692.6639336013795, "W_1KI": 51.991207713568286, "W_D": 33.03220771356828, "J_D": 440.0786197633745, "W_D_1KI": 33.03220771356828, "J_D_1KI": 33.03220771356828} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output index b2aa093..338ad01 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.884527921676636} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.036840677261353} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
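The derived fields in these dicts follow from the printed samples. The 20-entry list after each run is idle power; the two bare numbers under it are its sum and mean (399.4 and 19.97 W above). W is J over the measurement duration, W_D subtracts the idle mean, J_D is W_D times the duration, and the *_1KI fields divide by ITERATIONS/1000. A check against the 500000 × 500000 record above (field meanings inferred from the numbers; the post-processing script is not in this diff):

iterations = 1000
duration_s = 41.1020667552948       # printed after the in-run power samples
joules     = 3160.1361892318723     # 'J' as recorded
idle = [22.44, 22.28, 22.0, 22.08, 22.44, 22.44, 22.24, 22.28, 22.28, 21.92,
        21.72, 21.72, 21.8, 22.04, 22.2, 22.32, 22.48, 22.44, 22.24, 22.16]

w      = joules / duration_s        # 76.885... == 'W'
w_idle = sum(idle) / len(idle)      # 399.4 / 20 == 19.97, the bare numbers above
w_d    = w - w_idle                 # 56.915... == 'W_D' (dynamic power)
j_d    = w_d * duration_s           # 2339.33... == 'J_D' (dynamic energy)
per_1k = lambda x: 1000 * x / iterations
print(per_1k(w), per_1k(w_d))       # 'W_1KI' and 'W_D_1KI'; equal to W and W_D at 1000 iterations

One apparent wrinkle: J_D_1KI does not track J_D. In the 142926-iteration record further up, J_D_1KI = 0.00020021 ≈ W_D_1KI / 142.926, i.e. W_D divided by (ITERATIONS/1000) twice, which looks like a double division rather than an energy-per-iteration figure.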
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 12, ..., 249988, 249997, +tensor(crow_indices=tensor([ 0, 11, 16, ..., 249987, 249992, 250000]), - col_indices=tensor([ 1848, 28763, 31705, ..., 4981, 22506, 45960]), - values=tensor([0.8493, 0.0534, 0.5342, ..., 0.4299, 0.9704, 0.1142]), + col_indices=tensor([ 4880, 10510, 11344, ..., 23863, 34979, 45750]), + values=tensor([0.1743, 0.6413, 0.1893, ..., 0.8376, 0.7119, 0.1905]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.1630, 0.3141, 0.8980, ..., 0.6818, 0.2617, 0.8646]) +tensor([0.5173, 0.4884, 0.4084, ..., 0.9473, 0.2501, 0.4146]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 6.884527921676636 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1525 -ss 50000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.375513792037964} +Time: 10.036840677261353 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 12, ..., 249992, 249994, +tensor(crow_indices=tensor([ 0, 11, 16, ..., 249987, 249992, 250000]), - col_indices=tensor([ 3205, 25770, 28303, ..., 16579, 33459, 36956]), - values=tensor([0.6871, 0.0301, 0.1880, ..., 0.0850, 0.6966, 0.8839]), + col_indices=tensor([ 4880, 10510, 11344, ..., 23863, 34979, 45750]), + values=tensor([0.1743, 0.6413, 0.1893, ..., 0.8376, 0.7119, 0.1905]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3970, 0.9447, 0.7491, ..., 0.5145, 0.9554, 0.9707]) +tensor([0.5173, 0.4884, 0.4084, ..., 0.9473, 0.2501, 0.4146]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,30 +33,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.375513792037964 seconds +Time: 10.036840677261353 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 12, ..., 249992, 249994, - 250000]), - col_indices=tensor([ 3205, 25770, 28303, ..., 16579, 33459, 36956]), - values=tensor([0.6871, 0.0301, 0.1880, ..., 0.0850, 0.6966, 0.8839]), - size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3970, 0.9447, 0.7491, ..., 0.5145, 0.9554, 0.9707]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 250000 -Density: 0.0001 -Time: 10.375513792037964 seconds - -[20.84, 20.72, 20.68, 20.6, 20.64, 20.4, 20.52, 20.48, 20.44, 20.84] -[21.0, 20.96, 21.88, 23.12, 23.12, 32.08, 50.16, 64.0, 76.16, 91.92, 90.6, 89.16, 89.36, 89.52, 88.8, 89.28, 89.04] -17.501073837280273 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1525, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.375513792037964, 'TIME_S_1KI': 6.80361560133637, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1039.7556144714354, 'W': 59.41096095809724} -[20.84, 20.72, 20.68, 20.6, 20.64, 20.4, 20.52, 20.48, 20.44, 20.84, 20.92, 20.84, 20.84, 20.72, 21.0, 21.0, 21.28, 21.24, 20.92, 20.6] -373.91999999999996 -18.695999999999998 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1525, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.375513792037964, 'TIME_S_1KI': 6.80361560133637, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1039.7556144714354, 'W': 59.41096095809724, 'J_1KI': 681.8069603091379, 'W_1KI': 38.958007185637534, 'W_D': 40.714960958097244, 'J_D': 712.5555380096434, 'W_D_1KI': 26.698335054489995, 'J_D_1KI': 17.507104953763932} +[21.04, 21.04, 21.36, 21.48, 21.32, 21.52, 21.4, 21.16, 21.08, 21.12] +[21.08, 21.08, 21.08, 22.4, 23.64, 33.72, 51.68, 65.48, 81.88, 95.88, 94.44, 94.2, 93.16] +13.322712898254395 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.036840677261353, 'TIME_S_1KI': 10.036840677261353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.6639336013795, 'W': 51.991207713568286} +[21.04, 21.04, 21.36, 21.48, 21.32, 21.52, 21.4, 21.16, 21.08, 21.12, 21.08, 21.04, 20.96, 20.96, 20.76, 20.68, 20.64, 20.84, 20.92, 20.8] +379.18 +18.959 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.036840677261353, 'TIME_S_1KI': 10.036840677261353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.6639336013795, 'W': 51.991207713568286, 'J_1KI': 692.6639336013795, 'W_1KI': 51.991207713568286, 'W_D': 33.03220771356828, 'J_D': 440.0786197633745, 'W_D_1KI': 33.03220771356828, 'J_D_1KI': 33.03220771356828} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json index c30df0a..aeffedd 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json +++ 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 60.445369720458984, "TIME_S_1KI": 60.445369720458984, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5178.136594352722, "W": 75.0095899661512, "J_1KI": 5178.136594352722, "W_1KI": 75.0095899661512, "W_D": 55.72458996615119, "J_D": 3846.835299849509, "W_D_1KI": 55.72458996615119, "J_D_1KI": 55.72458996615119} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 64.77457070350647, "TIME_S_1KI": 64.77457070350647, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4994.562112493517, "W": 73.50821663676467, "J_1KI": 4994.562112493517, "W_1KI": 73.50821663676467, "W_D": 54.114216636764674, "J_D": 3676.8245582232494, "W_D_1KI": 54.114216636764674, "J_D_1KI": 54.114216636764674} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output index 1aeb456..56ca1f5 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 60.445369720458984} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 64.77457070350647} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
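Per the command lines, TIME_S is the wall clock for repeating one sparse-times-dense multiply ITERATIONS times; the dense operand is the random vector echoed after each matrix (tensor([0.1693, ...]) above). Only the conversion line of spmv.py appears in these outputs, so this loop is an assumed reconstruction of the measurement, not the script itself:

import time
import torch

rows, density, iterations = 50000, 0.001, 1000   # the 'csr 1000 -ss 50000 -sd 0.001' arguments
nnz = int(rows * rows * density)                 # 2,500,000, matching MATRIX_NNZ
idx = torch.stack([torch.randint(0, rows, (nnz,)),
                   torch.randint(0, rows, (nnz,))])
matrix = torch.sparse_coo_tensor(idx, torch.rand(nnz), (rows, rows)).coalesce()
matrix = matrix.to_sparse_csr().type(torch.float32)
vector = torch.rand(rows)                        # the echoed dense operand

start = time.time()
for _ in range(iterations):
    result = matrix @ vector                     # assumed SpMV kernel under test
print(f"Time: {time.time() - start} seconds")    # compare: 64.77457070350647 seconds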
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 42, 88, ..., 2499911, - 2499959, 2500000]), - col_indices=tensor([ 784, 2104, 3070, ..., 44692, 45478, 45799]), - values=tensor([0.0569, 0.3731, 0.2156, ..., 0.1856, 0.5823, 0.7517]), +tensor(crow_indices=tensor([ 0, 48, 96, ..., 2499908, + 2499960, 2500000]), + col_indices=tensor([ 291, 1039, 1041, ..., 49096, 49434, 49928]), + values=tensor([0.5586, 0.2987, 0.2608, ..., 0.4587, 0.5222, 0.8471]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7969, 0.4843, 0.4078, ..., 0.5644, 0.6126, 0.7864]) +tensor([0.1693, 0.3191, 0.9556, ..., 0.1736, 0.4599, 0.2505]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,16 +16,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 60.445369720458984 seconds +Time: 64.77457070350647 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 42, 88, ..., 2499911, - 2499959, 2500000]), - col_indices=tensor([ 784, 2104, 3070, ..., 44692, 45478, 45799]), - values=tensor([0.0569, 0.3731, 0.2156, ..., 0.1856, 0.5823, 0.7517]), +tensor(crow_indices=tensor([ 0, 48, 96, ..., 2499908, + 2499960, 2500000]), + col_indices=tensor([ 291, 1039, 1041, ..., 49096, 49434, 49928]), + values=tensor([0.5586, 0.2987, 0.2608, ..., 0.4587, 0.5222, 0.8471]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7969, 0.4843, 0.4078, ..., 0.5644, 0.6126, 0.7864]) +tensor([0.1693, 0.3191, 0.9556, ..., 0.1736, 0.4599, 0.2505]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -33,13 +33,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 60.445369720458984 seconds +Time: 64.77457070350647 seconds -[21.52, 21.76, 21.6, 21.68, 21.68, 21.48, 21.52, 21.52, 21.72, 22.04] -[22.08, 22.0, 22.16, 23.24, 24.16, 35.0, 49.88, 62.44, 76.6, 76.6, 84.16, 86.2, 86.12, 85.64, 84.84, 83.12, 82.96, 81.88, 81.44, 81.36, 81.2, 82.36, 83.24, 83.28, 84.08, 84.08, 84.48, 84.24, 83.64, 83.96, 83.6, 83.68, 83.56, 84.48, 83.84, 84.08, 84.36, 84.48, 84.04, 84.4, 85.08, 85.08, 84.88, 84.76, 84.72, 84.08, 83.2, 83.08, 83.4, 83.56, 83.6, 83.56, 83.72, 83.4, 84.96, 85.84, 86.72, 86.72, 87.6, 88.08, 87.2, 87.32, 87.24, 86.88, 86.4, 85.96, 84.76] -69.0329942703247 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 60.445369720458984, 'TIME_S_1KI': 60.445369720458984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5178.136594352722, 'W': 75.0095899661512} -[21.52, 21.76, 21.6, 21.68, 21.68, 21.48, 21.52, 21.52, 21.72, 22.04, 21.4, 21.44, 21.4, 21.2, 21.2, 21.2, 21.2, 21.04, 20.96, 21.24] -385.70000000000005 -19.285000000000004 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 60.445369720458984, 'TIME_S_1KI': 
60.445369720458984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5178.136594352722, 'W': 75.0095899661512, 'J_1KI': 5178.136594352722, 'W_1KI': 75.0095899661512, 'W_D': 55.72458996615119, 'J_D': 3846.835299849509, 'W_D_1KI': 55.72458996615119, 'J_D_1KI': 55.72458996615119} +[21.48, 21.56, 21.32, 21.36, 21.52, 21.32, 21.32, 21.4, 21.44, 21.56] +[21.64, 21.6, 22.08, 23.76, 24.8, 35.36, 47.08, 60.56, 71.04, 80.24, 82.48, 82.48, 82.88, 83.68, 82.36, 82.16, 82.04, 80.88, 80.72, 80.88, 80.44, 80.6, 80.44, 80.8, 80.56, 79.4, 78.92, 78.92, 78.4, 78.64, 80.36, 81.28, 82.52, 82.68, 81.64, 81.52, 81.32, 81.2, 80.48, 80.32, 82.04, 81.64, 81.64, 82.44, 82.48, 81.2, 81.08, 81.04, 81.44, 81.2, 81.68, 81.32, 81.0, 81.28, 81.36, 81.24, 81.36, 81.36, 81.88, 82.48, 83.6, 83.4, 83.64, 82.8, 82.56, 82.36] +67.94563031196594 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 64.77457070350647, 'TIME_S_1KI': 64.77457070350647, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4994.562112493517, 'W': 73.50821663676467} +[21.48, 21.56, 21.32, 21.36, 21.52, 21.32, 21.32, 21.4, 21.44, 21.56, 21.88, 21.92, 21.8, 21.52, 21.68, 21.52, 21.68, 21.68, 21.6, 21.56] +387.88 +19.394 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 64.77457070350647, 'TIME_S_1KI': 64.77457070350647, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4994.562112493517, 'W': 73.50821663676467, 'J_1KI': 4994.562112493517, 'W_1KI': 73.50821663676467, 'W_D': 54.114216636764674, 'J_D': 3676.8245582232494, 'W_D_1KI': 54.114216636764674, 'J_D_1KI': 54.114216636764674} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json index 0a48909..2428814 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 8439, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 16.30658531188965, "TIME_S_1KI": 1.9322888152493953, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 993.3751641559601, "W": 60.56268692312065, "J_1KI": 117.71242613531936, "W_1KI": 7.176524105121538, "W_D": 41.768686923120654, "J_D": 685.1079160590172, "W_D_1KI": 4.949482986505588, "J_D_1KI": 0.5865011241267435} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 6367, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 14.819957971572876, "TIME_S_1KI": 2.3276202248426068, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 977.0843494701387, "W": 58.720628093786544, "J_1KI": 153.46071139785437, "W_1KI": 9.222652441304625, "W_D": 39.857628093786545, "J_D": 663.2126712820532, "W_D_1KI": 6.260032683176778, "J_D_1KI": 0.9831997303560197} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output index ee322a5..c94062e 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.2654190063476562} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.0054540634155273} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 25000, 25000]), - col_indices=tensor([ 6514, 22496, 11789, ..., 40007, 5149, 28458]), - values=tensor([0.4327, 0.6473, 0.1491, ..., 0.8954, 0.9190, 0.6593]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([29300, 37118, 28917, ..., 16725, 28059, 47397]), + values=tensor([0.1773, 0.7310, 0.0095, ..., 0.4568, 0.7722, 0.2574]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.4414, 0.3955, 0.1417, ..., 0.3292, 0.0955, 0.0474]) +tensor([0.0745, 0.0507, 0.1628, ..., 0.0663, 0.8219, 0.2626]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 3.2654190063476562 seconds +Time: 2.0054540634155273 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3215 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.000005722045898} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 5235 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.632020473480225} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([27536, 25934, 37963, ..., 3997, 32688, 28318]), - values=tensor([0.1759, 0.2893, 0.0177, ..., 0.2344, 0.0283, 0.5475]), + col_indices=tensor([ 6005, 4214, 13465, ..., 35902, 7875, 2053]), + values=tensor([0.3591, 0.3792, 0.0771, ..., 0.2893, 0.2529, 0.4673]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.4720, 0.7633, 0.9347, ..., 0.8863, 0.6224, 0.2346]) +tensor([0.1098, 0.6338, 0.4539, ..., 0.7586, 0.0998, 0.7821]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 4.000005722045898 seconds +Time: 8.632020473480225 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8439 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 16.30658531188965} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 6367 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 14.819957971572876} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), - col_indices=tensor([22959, 5139, 40799, ..., 46493, 8579, 7673]), - values=tensor([0.4149, 0.3641, 0.9895, ..., 0.4042, 0.1062, 0.3479]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([30839, 39998, 2326, ..., 30652, 20576, 5061]), + values=tensor([0.3250, 0.6882, 0.6966, ..., 0.0105, 0.7219, 0.0367]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8723, 0.6408, 0.2457, ..., 0.3733, 0.2625, 0.6379]) +tensor([0.0417, 0.5559, 0.6322, ..., 0.5652, 0.2111, 0.1243]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 16.30658531188965 seconds +Time: 14.819957971572876 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), - col_indices=tensor([22959, 5139, 40799, ..., 46493, 8579, 7673]), - values=tensor([0.4149, 0.3641, 0.9895, ..., 0.4042, 0.1062, 0.3479]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([30839, 39998, 2326, ..., 30652, 20576, 5061]), + values=tensor([0.3250, 0.6882, 0.6966, ..., 0.0105, 0.7219, 0.0367]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8723, 0.6408, 0.2457, ..., 0.3733, 0.2625, 0.6379]) +tensor([0.0417, 0.5559, 0.6322, ..., 0.5652, 0.2111, 0.1243]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 16.30658531188965 seconds +Time: 14.819957971572876 seconds -[21.24, 20.92, 20.92, 21.08, 20.72, 20.88, 21.0, 21.0, 20.68, 20.92] -[21.0, 20.72, 20.88, 25.2, 27.52, 41.48, 58.76, 70.32, 84.16, 91.04, 91.68, 91.84, 91.84, 92.08, 92.44, 91.88] -16.40242886543274 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 8439, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 16.30658531188965, 'TIME_S_1KI': 1.9322888152493953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 993.3751641559601, 'W': 60.56268692312065} -[21.24, 20.92, 20.92, 21.08, 20.72, 20.88, 21.0, 21.0, 20.68, 20.92, 20.8, 20.96, 20.92, 20.92, 20.76, 20.88, 20.8, 20.92, 20.76, 20.56] -375.88 -18.794 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 8439, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 16.30658531188965, 'TIME_S_1KI': 1.9322888152493953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 993.3751641559601, 'W': 60.56268692312065, 'J_1KI': 117.71242613531936, 'W_1KI': 7.176524105121538, 'W_D': 41.768686923120654, 'J_D': 685.1079160590172, 'W_D_1KI': 4.949482986505588, 'J_D_1KI': 0.5865011241267435} +[20.72, 20.6, 20.92, 21.12, 21.0, 21.04, 21.08, 20.8, 20.88, 21.08] +[20.96, 21.0, 21.0, 24.32, 26.04, 35.64, 50.52, 67.6, 77.24, 93.48, 91.28, 89.6, 88.96, 88.4, 88.92, 90.04] +16.63954186439514 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 6367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 14.819957971572876, 'TIME_S_1KI': 2.3276202248426068, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 977.0843494701387, 'W': 58.720628093786544} +[20.72, 20.6, 20.92, 21.12, 21.0, 21.04, 21.08, 20.8, 20.88, 21.08, 20.88, 20.92, 20.68, 20.84, 21.0, 21.0, 20.92, 21.32, 21.28, 21.04] +377.26 +18.863 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 6367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 14.819957971572876, 'TIME_S_1KI': 2.3276202248426068, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 977.0843494701387, 'W': 58.720628093786544, 'J_1KI': 153.46071139785437, 'W_1KI': 9.222652441304625, 'W_D': 39.857628093786545, 'J_D': 663.2126712820532, 'W_D_1KI': 6.260032683176778, 'J_D_1KI': 0.9831997303560197} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..fef2205 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 97519, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.570974826812744, "TIME_S_1KI": 0.10839913070081465, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.3015551567077, "W": 22.190891521159134, "J_1KI": 3.233232038440793, "W_1KI": 0.22755454343419368, "W_D": 3.682891521159135, "J_D": 52.32874141120907, "W_D_1KI": 0.037765886864704674, "J_D_1KI": 0.00038726696197361203} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..c6cbfe2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.11602067947387695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([ 613, 2610, 3896, ..., 2268, 1349, 1721]), + values=tensor([0.3594, 0.2050, 0.8766, ..., 0.2511, 0.4340, 0.6606]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7862, 0.0116, 0.6512, ..., 0.0192, 0.3599, 0.4463]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.11602067947387695 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 90501 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.744299173355103} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2497, 2498, 2500]), + col_indices=tensor([3869, 881, 2923, ..., 3064, 1070, 3092]), + values=tensor([0.3867, 0.1123, 0.7736, ..., 0.1665, 0.3688, 0.6121]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7562, 0.4892, 0.9144, ..., 0.6968, 0.8474, 0.7157]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 9.744299173355103 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 97519 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.570974826812744} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 2497, 2499, 2500]), + col_indices=tensor([4211, 3231, 4340, ..., 2446, 2540, 154]), + values=tensor([0.2167, 0.9555, 0.6550, ..., 0.2361, 0.5850, 0.4084]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3692, 0.1411, 0.4138, ..., 0.1913, 0.1315, 0.0581]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.570974826812744 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 2497, 2499, 2500]), + col_indices=tensor([4211, 3231, 4340, ..., 2446, 2540, 154]), + values=tensor([0.2167, 0.9555, 0.6550, ..., 0.2361, 0.5850, 0.4084]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3692, 0.1411, 0.4138, ..., 0.1913, 0.1315, 0.0581]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.570974826812744 seconds + +[20.44, 20.44, 20.48, 20.4, 20.48, 20.68, 20.76, 20.76, 20.72, 20.88] +[20.64, 20.56, 21.92, 23.68, 23.68, 25.04, 25.48, 26.12, 24.52, 24.44, 23.92, 24.04, 24.32, 24.36] +14.20860242843628 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 97519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.570974826812744, 'TIME_S_1KI': 0.10839913070081465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.3015551567077, 'W': 22.190891521159134} +[20.44, 20.44, 20.48, 20.4, 20.48, 20.68, 20.76, 20.76, 20.72, 20.88, 20.4, 20.52, 20.56, 20.56, 20.56, 20.6, 20.52, 20.4, 20.6, 20.52] +370.15999999999997 +18.508 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 97519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.570974826812744, 'TIME_S_1KI': 0.10839913070081465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.3015551567077, 'W': 22.190891521159134, 'J_1KI': 3.233232038440793, 'W_1KI': 0.22755454343419368, 'W_D': 3.682891521159135, 'J_D': 52.32874141120907, 'W_D_1KI': 0.037765886864704674, 'J_D_1KI': 0.00038726696197361203} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..6041c9a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 17764, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.691047191619873, "TIME_S_1KI": 0.6018378288459735, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 329.0331690597534, "W": 23.15708952749249, "J_1KI": 18.522470674383776, "W_1KI": 1.3035965732657337, "W_D": 4.568089527492493, "J_D": 64.90681706762312, "W_D_1KI": 0.2571543305276116, "J_D_1KI": 0.01447615010851225} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..626c252 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.6273210048675537} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 24988, 24996, 25000]), + col_indices=tensor([1892, 2918, 4655, ..., 2029, 2603, 3010]), + values=tensor([0.8283, 0.5273, 0.2909, ..., 0.5828, 0.6477, 0.7502]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8412, 0.7891, 0.2404, ..., 0.8503, 0.9914, 0.6212]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.6273210048675537 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 16737 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.892534494400024} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 10, ..., 24991, 24996, 25000]), + col_indices=tensor([4752, 479, 2068, ..., 1338, 4478, 4539]), + values=tensor([0.3996, 0.8763, 0.4834, ..., 0.3300, 0.4860, 0.9993]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9991, 0.1904, 0.1090, ..., 0.8295, 0.4248, 0.2043]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.892534494400024 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17764 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.691047191619873} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 15, ..., 24992, 24996, 25000]), + col_indices=tensor([1098, 1490, 1639, ..., 3549, 3645, 4602]), + values=tensor([0.2763, 0.7775, 0.9451, ..., 0.5590, 0.3508, 0.3085]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1047, 0.7118, 0.3308, ..., 0.3344, 0.9893, 0.0200]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.691047191619873 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 15, ..., 24992, 24996, 25000]), + col_indices=tensor([1098, 1490, 1639, ..., 3549, 3645, 4602]), + values=tensor([0.2763, 0.7775, 0.9451, ..., 0.5590, 0.3508, 0.3085]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1047, 0.7118, 0.3308, ..., 0.3344, 0.9893, 0.0200]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.691047191619873 seconds + +[20.44, 20.2, 20.44, 20.64, 20.6, 20.72, 21.04, 21.04, 21.04, 20.88] +[20.72, 20.68, 20.52, 21.28, 23.28, 29.52, 30.36, 30.6, 30.4, 23.8, 23.72, 23.72, 23.84, 23.92] +14.208744525909424 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17764, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.691047191619873, 'TIME_S_1KI': 0.6018378288459735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.0331690597534, 'W': 23.15708952749249} +[20.44, 20.2, 20.44, 20.64, 20.6, 20.72, 21.04, 21.04, 21.04, 20.88, 20.68, 20.8, 20.72, 20.4, 20.32, 20.52, 20.64, 20.6, 20.72, 20.68] +371.78 +18.589 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17764, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.691047191619873, 'TIME_S_1KI': 0.6018378288459735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.0331690597534, 'W': 23.15708952749249, 'J_1KI': 18.522470674383776, 'W_1KI': 1.3035965732657337, 'W_D': 4.568089527492493, 'J_D': 64.90681706762312, 'W_D_1KI': 0.2571543305276116, 'J_D_1KI': 0.01447615010851225} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..5558ea5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1959, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.120546340942383, "TIME_S_1KI": 5.676644380266659, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 339.91848049163815, "W": 23.94849862540507, "J_1KI": 173.51632490640029, "W_1KI": 12.224858920574308, "W_D": 5.3214986254050665, "J_D": 75.53190515112868, "W_D_1KI": 2.716436255949498, "J_D_1KI": 1.3866443368808055} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..c4f4108 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], 
"MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.658592224121094} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 40, 84, ..., 249908, 249949, + 250000]), + col_indices=tensor([ 330, 398, 412, ..., 4758, 4825, 4990]), + values=tensor([0.1241, 0.3411, 0.2552, ..., 0.9324, 0.8443, 0.4144]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9270, 0.0262, 0.1807, ..., 0.7250, 0.9803, 0.9114]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 5.658592224121094 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1855 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.939346075057983} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 42, 99, ..., 249900, 249944, + 250000]), + col_indices=tensor([ 71, 83, 134, ..., 4502, 4510, 4544]), + values=tensor([0.1222, 0.9313, 0.0593, ..., 0.6337, 0.4012, 0.6808]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6318, 0.1040, 0.0347, ..., 0.9714, 0.1743, 0.3337]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 9.939346075057983 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1959 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.120546340942383} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 94, ..., 249896, 249940, + 250000]), + col_indices=tensor([ 62, 114, 171, ..., 4675, 4821, 4860]), + values=tensor([0.5686, 0.1100, 0.2304, ..., 0.6863, 0.4817, 0.3965]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8036, 0.6564, 0.9943, ..., 0.3026, 0.0525, 0.3398]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 11.120546340942383 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 94, ..., 249896, 249940, + 250000]), + col_indices=tensor([ 62, 114, 171, ..., 4675, 4821, 4860]), + values=tensor([0.5686, 0.1100, 0.2304, ..., 0.6863, 0.4817, 0.3965]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8036, 0.6564, 0.9943, ..., 0.3026, 0.0525, 0.3398]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 11.120546340942383 seconds + +[20.4, 20.56, 20.84, 20.88, 21.08, 21.16, 21.16, 20.92, 20.72, 20.76] +[20.2, 20.2, 20.96, 22.4, 23.64, 30.44, 31.56, 31.08, 30.2, 30.2, 24.44, 24.12, 24.04, 23.84] +14.19372820854187 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.120546340942383, 'TIME_S_1KI': 5.676644380266659, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 339.91848049163815, 'W': 23.94849862540507} +[20.4, 20.56, 20.84, 20.88, 21.08, 21.16, 21.16, 20.92, 20.72, 20.76, 20.32, 20.52, 20.64, 20.6, 20.44, 20.28, 20.44, 20.6, 20.64, 20.64] +372.54 +18.627000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.120546340942383, 'TIME_S_1KI': 5.676644380266659, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 339.91848049163815, 'W': 23.94849862540507, 'J_1KI': 173.51632490640029, 'W_1KI': 12.224858920574308, 'W_D': 5.3214986254050665, 'J_D': 75.53190515112868, 'W_D_1KI': 2.716436255949498, 'J_D_1KI': 1.3866443368808055} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..88b3edd --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 26.64292335510254, "TIME_S_1KI": 26.64292335510254, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 738.6764411926268, "W": 24.280702381341985, "J_1KI": 738.6764411926268, "W_1KI": 24.280702381341985, "W_D": 
5.882702381341982, "J_D": 178.96573136138895, "W_D_1KI": 5.882702381341982, "J_D_1KI": 5.882702381341982} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..d050d33 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 26.64292335510254} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 258, 500, ..., 1249494, + 1249753, 1250000]), + col_indices=tensor([ 51, 83, 92, ..., 4940, 4981, 4997]), + values=tensor([0.9970, 0.1345, 0.9294, ..., 0.5035, 0.6973, 0.4629]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6657, 0.7944, 0.8404, ..., 0.5502, 0.2324, 0.9138]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 26.64292335510254 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 258, 500, ..., 1249494, + 1249753, 1250000]), + col_indices=tensor([ 51, 83, 92, ..., 4940, 4981, 4997]), + values=tensor([0.9970, 0.1345, 0.9294, ..., 0.5035, 0.6973, 0.4629]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6657, 0.7944, 0.8404, ..., 0.5502, 0.2324, 0.9138]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 26.64292335510254 seconds + +[20.76, 20.56, 20.76, 20.24, 20.0, 20.0, 19.96, 19.88, 20.0, 20.0] +[20.2, 20.56, 20.84, 23.48, 25.48, 31.96, 32.48, 32.96, 29.96, 29.16, 23.84, 23.8, 23.88, 23.88, 24.28, 24.2, 24.36, 24.32, 23.8, 24.12, 24.2, 24.36, 24.28, 24.24, 24.24, 24.24, 24.32, 24.24, 24.48, 24.52] +30.422367095947266 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 26.64292335510254, 'TIME_S_1KI': 26.64292335510254, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 738.6764411926268, 'W': 24.280702381341985} +[20.76, 20.56, 20.76, 20.24, 20.0, 20.0, 19.96, 19.88, 20.0, 20.0, 20.52, 20.44, 20.56, 20.64, 20.52, 20.76, 20.64, 20.92, 20.96, 20.96] +367.96000000000004 +18.398000000000003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 26.64292335510254, 'TIME_S_1KI': 26.64292335510254, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 738.6764411926268, 'W': 24.280702381341985, 'J_1KI': 738.6764411926268, 'W_1KI': 24.280702381341985, 'W_D': 5.882702381341982, 'J_D': 178.96573136138895, 'W_D_1KI': 5.882702381341982, 'J_D_1KI': 5.882702381341982} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..70fec2c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1000, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 53.09800863265991, "TIME_S_1KI": 53.09800863265991, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1383.3292028236385, "W": 24.387974690726086, "J_1KI": 1383.3292028236385, "W_1KI": 24.387974690726086, "W_D": 5.857974690726085, "J_D": 332.2747198915477, "W_D_1KI": 5.857974690726085, "J_D_1KI": 5.857974690726085} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..20994df --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 53.09800863265991} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 484, 997, ..., 2498998, + 2499500, 2500000]), + col_indices=tensor([ 3, 13, 35, ..., 4966, 4993, 4997]), + values=tensor([0.0178, 0.5574, 0.8921, ..., 0.2131, 0.1882, 0.3495]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5571, 0.9138, 0.4400, ..., 0.1682, 0.6225, 0.2202]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 53.09800863265991 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 484, 997, ..., 2498998, + 2499500, 2500000]), + col_indices=tensor([ 3, 13, 35, ..., 4966, 4993, 4997]), + values=tensor([0.0178, 0.5574, 0.8921, ..., 0.2131, 0.1882, 0.3495]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5571, 0.9138, 0.4400, ..., 0.1682, 0.6225, 0.2202]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 53.09800863265991 seconds + +[20.6, 20.6, 20.72, 20.68, 20.72, 20.8, 20.84, 20.8, 20.92, 20.52] +[20.6, 20.4, 23.56, 24.32, 24.32, 28.84, 34.48, 35.48, 33.04, 32.72, 26.72, 24.2, 24.24, 24.24, 24.0, 24.0, 23.96, 23.96, 23.88, 23.96, 23.96, 23.92, 23.92, 23.68, 23.64, 23.76, 24.08, 24.12, 24.08, 24.08, 24.32, 24.08, 23.92, 24.04, 24.0, 23.96, 24.12, 24.24, 24.28, 24.24, 24.2, 23.92, 23.92, 24.0, 24.2, 24.24, 24.4, 24.2, 24.16, 24.0, 24.16, 24.32, 24.36, 24.36, 24.36, 24.12] +56.72177457809448 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 53.09800863265991, 'TIME_S_1KI': 53.09800863265991, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1383.3292028236385, 'W': 24.387974690726086} +[20.6, 20.6, 20.72, 20.68, 20.72, 20.8, 20.84, 20.8, 20.92, 20.52, 20.32, 20.36, 20.4, 20.48, 20.44, 20.64, 20.48, 20.36, 20.48, 20.32] +370.6 +18.53 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1000, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 53.09800863265991, 'TIME_S_1KI': 53.09800863265991, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1383.3292028236385, 'W': 24.387974690726086, 'J_1KI': 1383.3292028236385, 'W_1KI': 24.387974690726086, 'W_D': 5.857974690726085, 'J_D': 332.2747198915477, 'W_D_1KI': 5.857974690726085, 'J_D_1KI': 5.857974690726085} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.json 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..7d0783d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 275920, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.428882360458374, "TIME_S_1KI": 0.037796761236801875, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.0905113220215, "W": 22.127187461407587, "J_1KI": 1.141963291251165, "W_1KI": 0.08019421376271234, "W_D": 3.664187461407586, "J_D": 52.177923778533895, "W_D_1KI": 0.013279890770540686, "J_D_1KI": 4.812949684887172e-05} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..a254447 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,383 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1000 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04542350769042969} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1381, 3398, 2478, 1052, 529, 491, 2775, 3229, 1279, + 3454, 296, 3084, 4650, 2467, 784, 568, 918, 741, + 4819, 1730, 837, 408, 1523, 948, 4825, 1342, 952, + 2524, 3378, 2774, 370, 2319, 3980, 4108, 276, 4067, + 3823, 3153, 3158, 540, 2360, 1999, 1044, 1298, 4540, + 533, 3507, 1489, 2361, 1008, 555, 3416, 305, 3290, + 1136, 3809, 4448, 2408, 3611, 2892, 2540, 3779, 2041, + 4793, 4839, 534, 3664, 2180, 4711, 4601, 1136, 3681, + 165, 2858, 2937, 1364, 4737, 4916, 4412, 4772, 4253, + 200, 1254, 2702, 3949, 1138, 3253, 4523, 3563, 4932, + 724, 1152, 3157, 1713, 4323, 2340, 951, 3022, 1343, + 2260, 3881, 2605, 161, 4434, 3331, 1742, 2563, 4238, + 127, 3937, 396, 2283, 1557, 1554, 3292, 4855, 4197, + 4720, 716, 85, 4379, 3823, 2263, 2186, 2869, 1787, + 1168, 2429, 3045, 2919, 2350, 3479, 2094, 1065, 340, + 1288, 1877, 3764, 3457, 509, 1055, 3089, 605, 1110, + 3765, 3334, 3358, 602, 1278, 2312, 2279, 3749, 3299, + 4530, 804, 4261, 418, 4624, 585, 3050, 3236, 596, + 2133, 933, 4209, 3895, 174, 765, 3980, 381, 2181, + 2969, 46, 3997, 2920, 1083, 1216, 4056, 126, 248, + 1696, 352, 2821, 625, 3058, 4954, 4557, 865, 2010, + 2268, 2460, 1542, 329, 4649, 4740, 2546, 1491, 1783, + 2436, 2269, 2383, 734, 4372, 4876, 3373, 210, 4004, + 4560, 1501, 3320, 2378, 1630, 757, 3013, 4961, 4950, + 3415, 2145, 1401, 3711, 4355, 611, 1420, 3710, 4405, + 2508, 3816, 3, 3115, 4093, 2712, 1642, 4784, 2945, + 3902, 1255, 2147, 1010, 3088, 1205, 4589, 714, 2492, + 1954, 4006, 3877, 588, 962, 61, 4470]), + values=tensor([6.2379e-01, 5.1445e-01, 5.2888e-01, 6.4643e-01, + 3.6807e-01, 4.6260e-01, 2.5238e-01, 5.8157e-01, + 8.8267e-01, 2.6474e-01, 2.8446e-01, 9.5475e-01, + 4.8999e-01, 6.6621e-01, 3.2615e-02, 2.5044e-01, + 4.5496e-01, 3.7415e-01, 2.9199e-01, 2.8386e-01, + 7.1383e-01, 3.1109e-01, 1.1332e-01, 2.2089e-01, + 2.1912e-01, 5.6452e-01, 4.7190e-01, 5.8604e-01, + 7.8763e-01, 9.5122e-01, 1.1018e-01, 1.3969e-01, + 7.2800e-01, 6.6977e-01, 2.9413e-01, 6.1351e-01, + 4.9889e-01, 3.4691e-01, 3.9756e-01, 7.5031e-01, + 1.4612e-01, 6.6037e-01, 2.5630e-01, 9.1057e-02, + 8.2140e-01, 9.9620e-01, 5.5939e-01, 1.0762e-01, + 7.8811e-01, 5.4825e-01, 1.0084e-01, 8.9423e-01, + 7.7729e-01, 2.7164e-01, 7.0220e-01, 1.6836e-01, + 5.3765e-01, 2.0228e-01, 1.5568e-02, 8.3985e-01, + 2.3206e-01, 6.7022e-01, 4.7791e-01, 6.4798e-01, + 6.7036e-01, 1.6005e-01, 7.3101e-01, 9.4913e-01, + 2.2292e-01, 4.6540e-01, 7.6590e-01, 2.9344e-01, + 5.6223e-01, 8.4355e-01, 8.4945e-01, 1.4869e-01, + 2.8265e-01, 3.2754e-01, 5.8549e-01, 9.8812e-01, + 5.4427e-01, 9.3814e-01, 8.4516e-01, 1.7512e-01, + 1.2307e-02, 2.2939e-01, 7.7071e-01, 1.9977e-01, + 6.3831e-01, 1.4402e-01, 3.9596e-02, 8.3780e-01, + 6.9744e-01, 5.2304e-02, 1.7853e-01, 2.9282e-01, + 5.7428e-01, 3.6008e-01, 1.5117e-01, 8.0683e-01, + 6.9041e-02, 5.8242e-01, 9.0514e-01, 7.4588e-01, + 7.5412e-01, 9.1699e-01, 3.8286e-01, 9.6918e-01, + 7.4727e-01, 1.2312e-01, 4.8375e-01, 3.6856e-01, + 5.6299e-01, 5.3561e-01, 1.4061e-01, 8.9669e-01, + 2.2440e-02, 2.9850e-01, 1.9549e-01, 5.4525e-01, + 3.6535e-01, 4.3468e-01, 5.3884e-01, 4.1129e-01, + 3.4185e-02, 4.7048e-01, 7.9007e-01, 7.4755e-01, + 6.5822e-01, 7.8901e-01, 5.2911e-01, 9.3945e-02, + 3.8728e-01, 2.4384e-01, 9.0271e-01, 7.7139e-01, + 4.5138e-01, 3.9539e-01, 2.1438e-01, 3.5791e-01, + 1.8080e-01, 7.7421e-01, 4.8385e-01, 4.9788e-02, + 2.4055e-01, 7.0484e-01, 7.2661e-02, 1.7125e-01, + 5.6265e-01, 4.2036e-01, 3.2309e-01, 4.4267e-01, + 
4.4235e-01, 6.4529e-02, 6.4435e-01, 3.7245e-02, + 9.3981e-02, 9.3849e-01, 7.6635e-01, 9.8748e-01, + 9.3709e-01, 4.7264e-01, 7.2366e-01, 2.8555e-01, + 6.0730e-01, 1.6315e-01, 1.9633e-01, 8.5030e-01, + 7.9308e-01, 8.9903e-01, 3.8550e-01, 1.0205e-01, + 7.1600e-01, 9.5343e-01, 5.6221e-01, 2.4332e-01, + 6.6738e-01, 6.3110e-01, 3.8857e-01, 3.1838e-01, + 9.9205e-01, 1.5720e-01, 5.2410e-01, 9.2976e-01, + 8.2543e-01, 3.3559e-01, 1.9409e-01, 5.6249e-01, + 6.4364e-01, 8.7136e-01, 7.9123e-01, 7.6006e-01, + 9.7435e-01, 1.3732e-04, 3.9675e-01, 1.5987e-01, + 8.7277e-01, 3.2665e-01, 2.4849e-01, 2.3783e-01, + 3.9434e-01, 2.1570e-01, 3.9410e-01, 1.8711e-01, + 8.7186e-01, 1.4542e-01, 2.5107e-01, 4.2214e-01, + 7.4868e-01, 9.8246e-01, 2.5484e-01, 6.8204e-01, + 1.5039e-01, 8.1100e-01, 5.5721e-01, 9.9490e-01, + 3.7944e-01, 6.0125e-01, 9.3454e-01, 4.8494e-01, + 6.9766e-01, 2.8071e-01, 2.0905e-01, 4.3587e-01, + 6.3412e-01, 5.5937e-01, 8.4498e-01, 7.4208e-01, + 3.0776e-01, 2.2212e-01, 1.0559e-01, 9.4254e-02, + 5.3053e-01, 9.3270e-01, 9.7013e-01, 2.7741e-01, + 1.3997e-01, 8.6033e-01, 4.2915e-01, 3.5325e-01, + 4.6135e-02, 7.5784e-01, 9.8773e-01, 1.4273e-01, + 6.0358e-01, 8.2895e-01, 4.6077e-01, 8.1063e-01, + 7.6600e-01, 4.0656e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.9879, 0.6356, 0.7019, ..., 0.7112, 0.3671, 0.2365]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.04542350769042969 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 231157 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.796557664871216} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3111, 1505, 3032, 732, 1363, 1458, 3691, 3479, 1828, + 3597, 4499, 2546, 4494, 4076, 1227, 315, 2912, 2533, + 3803, 3134, 640, 3070, 2300, 518, 2692, 231, 1494, + 3318, 4971, 422, 3082, 2927, 1622, 1132, 2842, 2550, + 858, 3774, 4214, 4966, 4389, 2049, 2398, 2999, 1799, + 2832, 2153, 27, 34, 4389, 312, 3190, 379, 1601, + 1697, 913, 4636, 815, 4061, 1986, 3680, 3169, 4367, + 3393, 3057, 2291, 4827, 23, 1618, 1053, 4545, 3302, + 3422, 4006, 1426, 4955, 4591, 3417, 1313, 3429, 107, + 4218, 3106, 1189, 3912, 4842, 4429, 3575, 3485, 3490, + 882, 360, 4104, 4077, 3992, 276, 3250, 2773, 1205, + 2877, 11, 3594, 1465, 1515, 1908, 3956, 3184, 720, + 1889, 1976, 1938, 4120, 4297, 973, 1625, 917, 1536, + 2392, 3682, 3004, 1179, 4481, 3988, 2811, 4539, 2610, + 1976, 4913, 2042, 484, 1934, 490, 618, 789, 166, + 350, 2451, 3722, 1235, 3537, 525, 2266, 4975, 4220, + 4123, 3129, 2765, 1943, 1088, 691, 3776, 4218, 1634, + 1744, 4688, 1575, 542, 1973, 3945, 1064, 4591, 2998, + 3960, 1404, 946, 565, 2717, 36, 3767, 131, 100, + 2765, 4203, 3784, 4608, 1970, 2801, 2408, 747, 3408, + 4944, 1175, 4949, 618, 3984, 4254, 2862, 67, 4254, + 4339, 3511, 3739, 1527, 1863, 4544, 3760, 3855, 3369, + 2589, 951, 3624, 662, 1187, 539, 768, 3623, 925, + 2247, 4155, 2098, 4222, 3094, 317, 3926, 4819, 4144, + 1170, 4442, 3477, 1185, 1554, 3509, 4061, 4484, 3086, + 3305, 1690, 502, 3177, 194, 4284, 4380, 4057, 3450, + 3635, 259, 715, 4710, 2651, 3054, 874, 3683, 2173, + 4229, 1021, 1554, 2109, 4700, 2191, 703]), + values=tensor([2.0669e-01, 3.0419e-01, 7.9177e-01, 9.1054e-01, + 3.8881e-01, 2.6543e-02, 3.2408e-01, 6.6356e-01, + 8.8613e-01, 5.9837e-02, 3.7951e-02, 3.4136e-01, + 1.1472e-01, 7.0817e-01, 3.4534e-01, 8.1697e-01, + 7.8754e-01, 7.8023e-01, 9.8801e-01, 9.9044e-01, + 1.5503e-01, 7.4190e-01, 2.0235e-02, 6.9844e-01, + 5.3330e-01, 1.2781e-01, 4.3680e-01, 3.2064e-01, + 5.9791e-01, 2.7496e-01, 7.0680e-01, 8.9543e-01, + 4.9085e-01, 6.2210e-02, 9.5831e-01, 7.1969e-01, + 4.5026e-01, 7.6189e-01, 6.9882e-01, 6.1830e-01, + 5.8254e-01, 7.1547e-01, 4.9443e-02, 2.3599e-01, + 3.0458e-01, 4.0447e-02, 3.1721e-01, 5.4475e-01, + 3.5915e-01, 3.9749e-01, 9.9941e-01, 1.6159e-01, + 4.4237e-01, 1.9078e-02, 2.7571e-01, 2.7359e-01, + 9.9205e-01, 8.2766e-01, 8.6948e-01, 5.9782e-01, + 9.2542e-02, 7.7287e-01, 2.5357e-01, 1.8439e-01, + 7.9355e-01, 4.9629e-01, 5.9496e-01, 8.0447e-01, + 7.1515e-01, 5.4041e-02, 2.8823e-01, 9.8319e-01, + 4.7970e-01, 7.2357e-01, 7.7834e-01, 3.4206e-01, + 2.4327e-01, 1.9641e-01, 4.3483e-01, 8.9675e-01, + 4.0813e-01, 7.5780e-01, 6.0567e-01, 9.2070e-01, + 9.5528e-01, 1.0617e-01, 7.5111e-01, 7.6998e-01, + 6.0210e-01, 1.4799e-01, 4.0369e-01, 5.5275e-01, + 5.6840e-01, 3.7878e-02, 3.9622e-01, 1.8217e-01, + 3.4162e-01, 6.7866e-01, 7.1868e-01, 6.7883e-01, + 1.2529e-01, 4.3035e-01, 9.3351e-01, 8.2335e-02, + 5.3977e-01, 1.7512e-01, 6.0881e-01, 8.6248e-01, + 7.3078e-02, 1.8153e-01, 3.9403e-01, 4.1817e-01, + 3.3126e-01, 3.6032e-01, 8.9726e-01, 2.7930e-01, + 4.8792e-01, 1.3891e-01, 2.4751e-01, 2.7167e-01, + 5.0927e-01, 4.1055e-01, 5.6724e-01, 2.4879e-01, + 2.5197e-02, 1.5971e-01, 3.7132e-01, 4.2815e-01, + 4.7405e-01, 7.3214e-03, 9.0357e-01, 8.8350e-01, + 4.2344e-01, 8.0222e-01, 7.7010e-01, 2.6446e-02, + 5.6419e-01, 5.7975e-01, 5.3498e-01, 9.6349e-01, + 3.2383e-01, 4.7991e-02, 4.7514e-01, 2.0036e-01, + 8.6415e-04, 9.9960e-01, 5.2178e-01, 6.4406e-01, + 9.3008e-01, 5.5881e-01, 5.6610e-01, 7.9121e-01, + 
9.2545e-02, 1.7558e-01, 4.5158e-01, 3.8172e-01, + 6.0582e-02, 5.0875e-01, 2.3341e-01, 8.0988e-01, + 8.7313e-02, 4.8022e-01, 2.4678e-01, 8.3788e-01, + 7.9942e-01, 9.3012e-01, 5.4482e-01, 5.7269e-01, + 2.2313e-01, 5.1440e-01, 8.5668e-01, 2.1900e-01, + 1.4248e-01, 6.0751e-01, 5.3076e-01, 8.7603e-01, + 4.7428e-01, 2.8933e-01, 1.1634e-01, 8.0641e-01, + 3.4647e-01, 2.6982e-01, 7.9889e-01, 7.3652e-01, + 8.6185e-02, 6.1090e-01, 1.8993e-01, 2.4439e-01, + 7.7528e-01, 5.1831e-01, 9.1209e-01, 4.2962e-02, + 9.5979e-01, 7.4704e-01, 3.4324e-02, 9.3159e-01, + 6.9336e-01, 2.2471e-02, 9.5922e-01, 8.1222e-02, + 8.0286e-01, 4.4178e-01, 5.1294e-01, 9.6079e-02, + 2.4623e-01, 2.3545e-01, 5.7434e-01, 4.0688e-01, + 6.4868e-01, 4.6111e-01, 2.1279e-01, 9.6345e-01, + 6.5069e-01, 5.7718e-01, 3.1769e-01, 1.3943e-01, + 2.0508e-01, 8.1624e-01, 7.2834e-01, 6.5481e-01, + 8.5438e-01, 5.3953e-01, 9.2988e-01, 1.0805e-01, + 2.8558e-01, 3.1275e-01, 1.8994e-01, 9.4125e-01, + 5.2808e-01, 8.8916e-01, 2.7744e-01, 1.4636e-01, + 6.6310e-01, 3.0763e-01, 3.7309e-01, 1.1286e-01, + 4.0607e-01, 5.7583e-01, 9.4268e-01, 7.0912e-01, + 7.5993e-01, 4.2294e-01, 2.2556e-01, 6.7211e-01, + 7.0234e-01, 5.8525e-02, 2.9419e-01, 7.4268e-01, + 2.3487e-01, 5.2578e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.6897, 0.5913, 0.2174, ..., 0.0385, 0.9360, 0.0281]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 8.796557664871216 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 275920 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.428882360458374} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1877, 1947, 3773, 1993, 3968, 2885, 3290, 1001, 3173, + 4910, 3638, 1123, 363, 1623, 2284, 1294, 3337, 1282, + 4527, 2181, 2496, 1486, 3269, 4914, 4411, 4627, 359, + 3937, 665, 2560, 4669, 1367, 4279, 2817, 2471, 714, + 3730, 1285, 2127, 1225, 3561, 1263, 4928, 962, 4246, + 2702, 4253, 1515, 1836, 1347, 3425, 3010, 841, 367, + 1474, 2557, 196, 3492, 4052, 1834, 372, 3142, 1541, + 3239, 1385, 3426, 430, 2192, 532, 4474, 1430, 267, + 833, 2225, 483, 2285, 3698, 4524, 1621, 1341, 4764, + 3118, 3570, 1901, 1111, 4654, 3844, 3263, 2577, 3400, + 4581, 4373, 3789, 4354, 2343, 2834, 3928, 1783, 4873, + 2054, 1997, 2249, 2170, 946, 1584, 4950, 1563, 3039, + 584, 2993, 3861, 3063, 1816, 784, 2505, 3309, 3091, + 3813, 1955, 2014, 1513, 2785, 1124, 4921, 2653, 215, + 1720, 4008, 467, 2665, 934, 4083, 732, 447, 3024, + 3508, 4583, 1928, 3999, 2112, 430, 3549, 2224, 4453, + 292, 788, 4633, 434, 1519, 2797, 4314, 3456, 1463, + 1133, 1520, 2779, 195, 566, 4705, 4339, 87, 3759, + 1171, 632, 4702, 4443, 3675, 4063, 3423, 1515, 3264, + 3975, 3586, 907, 4416, 890, 2296, 2089, 4867, 4932, + 4241, 1398, 950, 4682, 2581, 4604, 1861, 1492, 4359, + 3001, 171, 3190, 4056, 2779, 2102, 2341, 2228, 666, + 4124, 3282, 4080, 1125, 1782, 4068, 4582, 1989, 1861, + 2397, 1906, 3592, 4009, 2809, 3893, 4602, 4885, 4329, + 1546, 3221, 1533, 1812, 711, 832, 3637, 2430, 702, + 1951, 2527, 1663, 4378, 3187, 1848, 1976, 4944, 1611, + 3986, 4768, 1832, 171, 533, 127, 3370, 4616, 3556, + 3675, 2756, 3820, 3848, 2775, 4085, 1946]), + values=tensor([0.3630, 0.4957, 0.7258, 0.9637, 0.5431, 0.7370, 0.5194, + 0.1412, 0.9194, 0.8806, 0.2809, 0.4495, 0.3054, 0.7229, + 0.6894, 0.5378, 0.4829, 0.7917, 0.1077, 0.9396, 0.0834, + 0.8145, 0.2291, 0.0220, 0.8667, 0.8206, 0.7176, 0.1748, + 0.5433, 0.5398, 0.6732, 0.5495, 0.1751, 0.1751, 0.5534, + 0.4533, 0.5127, 0.9043, 0.7276, 0.3139, 0.4018, 0.6593, + 0.5712, 0.8906, 0.5321, 0.0490, 0.8603, 0.3211, 0.9292, + 0.2516, 0.5976, 0.6960, 0.6822, 0.0183, 0.1419, 0.0510, + 0.5915, 0.9381, 0.7663, 0.9175, 0.1026, 0.1428, 0.3603, + 0.1690, 0.2574, 0.9703, 0.3816, 0.3120, 0.6138, 0.6402, + 0.0171, 0.1702, 0.0571, 0.1251, 0.4789, 0.2100, 0.4597, + 0.8236, 0.2093, 0.3392, 0.8809, 0.8206, 0.6653, 0.7105, + 0.9427, 0.4744, 0.2605, 0.1657, 0.1195, 0.1792, 0.5307, + 0.1174, 0.6758, 0.8184, 0.0607, 0.0558, 0.3782, 0.8926, + 0.6897, 0.9924, 0.7956, 0.0060, 0.2666, 0.9269, 0.6602, + 0.5276, 0.2277, 0.4849, 0.8321, 0.2135, 0.2296, 0.7282, + 0.5446, 0.1493, 0.5845, 0.2697, 0.2635, 0.0055, 0.3342, + 0.6531, 0.8835, 0.6970, 0.3925, 0.6332, 0.2833, 0.7464, + 0.9403, 0.9564, 0.8529, 0.8534, 0.4902, 0.3672, 0.4884, + 0.3826, 0.8277, 0.2524, 0.5006, 0.8262, 0.8556, 0.5518, + 0.9345, 0.1818, 0.7419, 0.5510, 0.7359, 0.2338, 0.5242, + 0.8847, 0.7894, 0.5148, 0.5220, 0.3152, 0.5588, 0.6758, + 0.0222, 0.8094, 0.8800, 0.5482, 0.7029, 0.4511, 0.5521, + 0.1426, 0.5819, 0.4684, 0.3203, 0.4558, 0.0605, 0.4645, + 0.6967, 0.5420, 0.5383, 0.3399, 0.6017, 0.2217, 0.2779, + 0.6034, 0.6186, 0.5877, 0.7226, 0.4771, 0.2736, 0.9442, + 0.4016, 0.5813, 0.3926, 0.6636, 0.2000, 0.5234, 0.8594, + 0.4283, 0.8253, 0.1300, 0.3810, 0.0496, 0.8722, 0.5976, + 0.0028, 0.5374, 0.0379, 0.0610, 0.9205, 0.9022, 0.6780, + 0.7337, 0.3928, 0.7007, 0.0730, 0.0899, 0.4352, 0.2480, + 0.7721, 0.6286, 0.0462, 0.5434, 0.2214, 0.2005, 0.5352, + 0.2866, 0.1634, 0.3716, 0.1574, 0.2559, 0.6104, 0.9417, + 0.5436, 0.9351, 0.6446, 
0.8506, 0.6360, 0.5124, 0.9341, + 0.9751, 0.4728, 0.6908, 0.5778, 0.2603, 0.9571, 0.5985, + 0.0453, 0.2921, 0.4748, 0.9573, 0.6189, 0.2369, 0.4918, + 0.2829, 0.0867, 0.8730, 0.1781, 0.6966]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.7582, 0.3275, 0.7400, ..., 0.8955, 0.3174, 0.3280]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.428882360458374 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1877, 1947, 3773, 1993, 3968, 2885, 3290, 1001, 3173, + 4910, 3638, 1123, 363, 1623, 2284, 1294, 3337, 1282, + 4527, 2181, 2496, 1486, 3269, 4914, 4411, 4627, 359, + 3937, 665, 2560, 4669, 1367, 4279, 2817, 2471, 714, + 3730, 1285, 2127, 1225, 3561, 1263, 4928, 962, 4246, + 2702, 4253, 1515, 1836, 1347, 3425, 3010, 841, 367, + 1474, 2557, 196, 3492, 4052, 1834, 372, 3142, 1541, + 3239, 1385, 3426, 430, 2192, 532, 4474, 1430, 267, + 833, 2225, 483, 2285, 3698, 4524, 1621, 1341, 4764, + 3118, 3570, 1901, 1111, 4654, 3844, 3263, 2577, 3400, + 4581, 4373, 3789, 4354, 2343, 2834, 3928, 1783, 4873, + 2054, 1997, 2249, 2170, 946, 1584, 4950, 1563, 3039, + 584, 2993, 3861, 3063, 1816, 784, 2505, 3309, 3091, + 3813, 1955, 2014, 1513, 2785, 1124, 4921, 2653, 215, + 1720, 4008, 467, 2665, 934, 4083, 732, 447, 3024, + 3508, 4583, 1928, 3999, 2112, 430, 3549, 2224, 4453, + 292, 788, 4633, 434, 1519, 2797, 4314, 3456, 1463, + 1133, 1520, 2779, 195, 566, 4705, 4339, 87, 3759, + 1171, 632, 4702, 4443, 3675, 4063, 3423, 1515, 3264, + 3975, 3586, 907, 4416, 890, 2296, 2089, 4867, 4932, + 4241, 1398, 950, 4682, 2581, 4604, 1861, 1492, 4359, + 3001, 171, 3190, 4056, 2779, 2102, 2341, 2228, 666, + 4124, 3282, 4080, 1125, 1782, 4068, 4582, 1989, 1861, + 2397, 1906, 3592, 4009, 2809, 3893, 4602, 4885, 4329, + 1546, 3221, 1533, 1812, 711, 832, 3637, 2430, 702, + 1951, 2527, 1663, 4378, 3187, 1848, 1976, 4944, 1611, + 3986, 4768, 1832, 171, 533, 127, 3370, 4616, 3556, + 3675, 2756, 3820, 3848, 2775, 4085, 1946]), + values=tensor([0.3630, 0.4957, 0.7258, 0.9637, 0.5431, 0.7370, 0.5194, + 0.1412, 0.9194, 0.8806, 0.2809, 0.4495, 0.3054, 0.7229, + 0.6894, 0.5378, 0.4829, 0.7917, 0.1077, 0.9396, 0.0834, + 0.8145, 0.2291, 0.0220, 0.8667, 0.8206, 0.7176, 0.1748, + 0.5433, 0.5398, 0.6732, 0.5495, 0.1751, 0.1751, 0.5534, + 0.4533, 0.5127, 0.9043, 0.7276, 0.3139, 0.4018, 0.6593, + 0.5712, 0.8906, 0.5321, 0.0490, 0.8603, 0.3211, 0.9292, + 0.2516, 0.5976, 0.6960, 0.6822, 0.0183, 0.1419, 0.0510, + 0.5915, 0.9381, 0.7663, 0.9175, 0.1026, 0.1428, 0.3603, + 0.1690, 0.2574, 0.9703, 0.3816, 0.3120, 0.6138, 0.6402, + 0.0171, 0.1702, 0.0571, 0.1251, 0.4789, 0.2100, 0.4597, + 0.8236, 0.2093, 0.3392, 0.8809, 0.8206, 0.6653, 0.7105, + 0.9427, 0.4744, 0.2605, 0.1657, 0.1195, 0.1792, 0.5307, + 0.1174, 0.6758, 0.8184, 0.0607, 0.0558, 0.3782, 0.8926, + 0.6897, 0.9924, 0.7956, 0.0060, 0.2666, 0.9269, 0.6602, + 0.5276, 0.2277, 0.4849, 0.8321, 0.2135, 0.2296, 0.7282, + 0.5446, 0.1493, 0.5845, 0.2697, 0.2635, 0.0055, 0.3342, + 0.6531, 0.8835, 0.6970, 0.3925, 0.6332, 0.2833, 
0.7464, + 0.9403, 0.9564, 0.8529, 0.8534, 0.4902, 0.3672, 0.4884, + 0.3826, 0.8277, 0.2524, 0.5006, 0.8262, 0.8556, 0.5518, + 0.9345, 0.1818, 0.7419, 0.5510, 0.7359, 0.2338, 0.5242, + 0.8847, 0.7894, 0.5148, 0.5220, 0.3152, 0.5588, 0.6758, + 0.0222, 0.8094, 0.8800, 0.5482, 0.7029, 0.4511, 0.5521, + 0.1426, 0.5819, 0.4684, 0.3203, 0.4558, 0.0605, 0.4645, + 0.6967, 0.5420, 0.5383, 0.3399, 0.6017, 0.2217, 0.2779, + 0.6034, 0.6186, 0.5877, 0.7226, 0.4771, 0.2736, 0.9442, + 0.4016, 0.5813, 0.3926, 0.6636, 0.2000, 0.5234, 0.8594, + 0.4283, 0.8253, 0.1300, 0.3810, 0.0496, 0.8722, 0.5976, + 0.0028, 0.5374, 0.0379, 0.0610, 0.9205, 0.9022, 0.6780, + 0.7337, 0.3928, 0.7007, 0.0730, 0.0899, 0.4352, 0.2480, + 0.7721, 0.6286, 0.0462, 0.5434, 0.2214, 0.2005, 0.5352, + 0.2866, 0.1634, 0.3716, 0.1574, 0.2559, 0.6104, 0.9417, + 0.5436, 0.9351, 0.6446, 0.8506, 0.6360, 0.5124, 0.9341, + 0.9751, 0.4728, 0.6908, 0.5778, 0.2603, 0.9571, 0.5985, + 0.0453, 0.2921, 0.4748, 0.9573, 0.6189, 0.2369, 0.4918, + 0.2829, 0.0867, 0.8730, 0.1781, 0.6966]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.7582, 0.3275, 0.7400, ..., 0.8955, 0.3174, 0.3280]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.428882360458374 seconds + +[20.56, 20.68, 20.64, 20.44, 20.64, 20.48, 20.48, 20.64, 20.68, 20.72] +[20.84, 20.84, 21.12, 24.24, 25.12, 25.84, 26.16, 24.96, 23.68, 23.68, 23.56, 23.8, 24.0, 23.88] +14.239971160888672 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 275920, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.428882360458374, 'TIME_S_1KI': 0.037796761236801875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.0905113220215, 'W': 22.127187461407587} +[20.56, 20.68, 20.64, 20.44, 20.64, 20.48, 20.48, 20.64, 20.68, 20.72, 20.56, 20.72, 20.56, 20.64, 20.56, 20.24, 20.2, 20.44, 20.2, 20.2] +369.26 +18.463 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 275920, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.428882360458374, 'TIME_S_1KI': 0.037796761236801875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.0905113220215, 'W': 22.127187461407587, 'J_1KI': 1.141963291251165, 'W_1KI': 0.08019421376271234, 'W_D': 3.664187461407586, 'J_D': 52.177923778533895, 'W_D_1KI': 0.013279890770540686, 'J_D_1KI': 4.812949684887172e-05} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json index 87aabb6..4a28d6c 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 66395, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.71402621269226, "TIME_S_1KI": 0.16136796765859268, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1809.3928852605818, "W": 143.23, "J_1KI": 27.251944954598716, "W_1KI": 2.15724075608103, "W_D": 107.452, "J_D": 1357.417330915451, "W_D_1KI": 
1.6183748776263271, "J_D_1KI": 0.02437495109008701} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 70787, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.243863582611084, "TIME_S_1KI": 0.15884079820604183, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2065.8814449405672, "W": 142.52, "J_1KI": 29.184475185282146, "W_1KI": 2.01336403576928, "W_D": 106.74100000000001, "J_D": 1547.2512722032072, "W_D_1KI": 1.5079181205588599, "J_D_1KI": 0.02130218995802704} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output index bc4163e..320d8b0 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.2295377254486084} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.22568225860595703} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
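The spmv.py:75 line echoed by each warning, matrix = matrix.to_sparse_csr().type(torch.float32), together with the printed CSR dumps and the trailing dense vector, pins down the general shape of the benchmark body. The following is only a rough reconstruction: the real generator guarantees exactly MATRIX_NNZ stored entries, whereas random integer coordinates can collide, and the real script's structure may differ.

import time
import torch

n, density, iterations = 100000, 0.0001, 1000         # the probe run above
nnz = int(n * n * density)                            # MATRIX_NNZ = 1000000
idx = torch.randint(n, (2, nnz))                      # approximate: duplicate coordinates
values = torch.rand(nnz)                              # coalesce, so stored nnz can fall short
matrix = torch.sparse_coo_tensor(idx, values, (n, n))
matrix = matrix.to_sparse_csr().type(torch.float32)   # spmv.py:75, quoted verbatim in the warnings
vector = torch.rand(n)                                # the dense tensor printed after each matrix
start = time.time()
for _ in range(iterations):
    matrix @ vector                                   # CSR sparse matrix-vector product
elapsed = time.time() - start                         # reported as TIME_S
print(matrix)
print(vector)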
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 19, ..., 999980, - 999992, 1000000]), - col_indices=tensor([ 2595, 16687, 29551, ..., 82666, 84305, 92330]), - values=tensor([0.2399, 0.6496, 0.1067, ..., 0.4780, 0.9034, 0.0304]), +tensor(crow_indices=tensor([ 0, 9, 21, ..., 999976, + 999987, 1000000]), + col_indices=tensor([66167, 77335, 80388, ..., 91843, 96961, 99110]), + values=tensor([0.4269, 0.3181, 0.3880, ..., 0.8858, 0.0510, 0.2541]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9784, 0.5709, 0.3671, ..., 0.6067, 0.7821, 0.8363]) +tensor([0.0143, 0.7097, 0.7299, ..., 0.1191, 0.1743, 0.7741]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 0.2295377254486084 seconds +Time: 0.22568225860595703 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '45744', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.2341063022613525} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '46525', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.901124477386475} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 18, ..., 999977, - 999994, 1000000]), - col_indices=tensor([ 464, 33291, 41816, ..., 39255, 78479, 83666]), - values=tensor([0.4695, 0.4859, 0.9230, ..., 0.6746, 0.1683, 0.8174]), +tensor(crow_indices=tensor([ 0, 9, 22, ..., 999980, + 999991, 1000000]), + col_indices=tensor([ 6899, 15825, 20330, ..., 53773, 69034, 81991]), + values=tensor([0.2590, 0.4256, 0.8626, ..., 0.0809, 0.7182, 0.1540]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0937, 0.3379, 0.3499, ..., 0.6520, 0.3862, 0.7030]) +tensor([0.5717, 0.8218, 0.0250, ..., 0.8733, 0.0737, 0.0088]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 7.2341063022613525 seconds +Time: 6.901124477386475 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '66395', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.71402621269226} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '70787', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.243863582611084} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 21, ..., 999982, +tensor(crow_indices=tensor([ 0, 11, 22, ..., 999983, 999990, 1000000]), - col_indices=tensor([ 7090, 12502, 14648, ..., 47720, 74306, 81506]), - values=tensor([0.0325, 0.8127, 0.1017, ..., 0.2993, 0.6676, 0.4101]), + col_indices=tensor([ 361, 1115, 8788, ..., 71181, 76543, 91304]), + values=tensor([0.4904, 0.2440, 0.4094, ..., 0.6184, 0.1804, 0.3924]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9849, 0.0117, 0.6257, ..., 0.6699, 0.0244, 0.0988]) +tensor([0.2770, 0.4028, 0.6616, ..., 0.6682, 0.3245, 0.3679]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.71402621269226 seconds +Time: 11.243863582611084 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 21, ..., 999982, +tensor(crow_indices=tensor([ 0, 11, 22, ..., 999983, 999990, 1000000]), - col_indices=tensor([ 7090, 12502, 14648, ..., 47720, 74306, 81506]), - values=tensor([0.0325, 0.8127, 0.1017, ..., 0.2993, 0.6676, 0.4101]), + col_indices=tensor([ 361, 1115, 8788, ..., 71181, 76543, 91304]), + values=tensor([0.4904, 0.2440, 0.4094, ..., 0.6184, 0.1804, 0.3924]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9849, 0.0117, 0.6257, ..., 0.6699, 0.0244, 0.0988]) +tensor([0.2770, 0.4028, 0.6616, ..., 0.6682, 0.3245, 0.3679]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.71402621269226 seconds +Time: 11.243863582611084 seconds -[41.74, 39.92, 40.13, 39.36, 40.44, 39.29, 39.29, 39.23, 40.06, 40.03] -[143.23] -12.632778644561768 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 66395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.71402621269226, 'TIME_S_1KI': 0.16136796765859268, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1809.3928852605818, 'W': 143.23} -[41.74, 39.92, 40.13, 39.36, 40.44, 39.29, 39.29, 39.23, 40.06, 40.03, 39.88, 40.13, 39.21, 40.07, 39.16, 39.33, 39.33, 40.09, 39.55, 40.29] -715.56 -35.778 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 66395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.71402621269226, 'TIME_S_1KI': 0.16136796765859268, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1809.3928852605818, 'W': 143.23, 'J_1KI': 27.251944954598716, 'W_1KI': 2.15724075608103, 'W_D': 107.452, 'J_D': 1357.417330915451, 'W_D_1KI': 1.6183748776263271, 'J_D_1KI': 0.02437495109008701} +[40.71, 39.67, 39.62, 40.0, 39.39, 40.7, 39.22, 40.0, 39.3, 39.88] +[142.52] +14.495379209518433 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 70787, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.243863582611084, 'TIME_S_1KI': 0.15884079820604183, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.8814449405672, 'W': 142.52} +[40.71, 39.67, 39.62, 40.0, 39.39, 40.7, 39.22, 40.0, 39.3, 39.88, 39.86, 40.15, 39.35, 40.1, 39.35, 40.22, 39.27, 39.7, 39.16, 40.31] +715.5799999999999 +35.778999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 70787, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.243863582611084, 'TIME_S_1KI': 0.15884079820604183, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.8814449405672, 'W': 142.52, 'J_1KI': 29.184475185282146, 'W_1KI': 2.01336403576928, 'W_D': 106.74100000000001, 'J_D': 1547.2512722032072, 'W_D_1KI': 1.5079181205588599, 'J_D_1KI': 0.02130218995802704} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..0f55c41 --- 
/dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4257, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.433454513549805, "TIME_S_1KI": 2.685800919321072, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1901.6043726658822, "W": 126.17, "J_1KI": 446.7005808470477, "W_1KI": 29.638242894056848, "W_D": 90.424, "J_D": 1362.8491225643158, "W_D_1KI": 21.241249706365988, "J_D_1KI": 4.989722740513504} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..dc2dadc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.4663901329040527} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 118, 225, ..., 9999805, + 9999897, 10000000]), + col_indices=tensor([ 1682, 1744, 2076, ..., 96929, 97254, 99780]), + values=tensor([0.4019, 0.5057, 0.8739, ..., 0.0479, 0.2913, 0.6813]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2475, 0.7795, 0.3565, ..., 0.8481, 0.6371, 0.4321]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 2.4663901329040527 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4257', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.433454513549805} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
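Each .output file closes with an idle-power sample list, a load power reading (a single wattmeter value on the Epyc; the Altra transcripts log a full load trace instead), the elapsed wall time, a result dict, then a second idle list with its total and mean, and the final dict. The derived .json fields reproduce from those numbers by simple arithmetic; note also that each final iteration count is printed twice, apparently once for the timed run and once for an identical rerun under power measurement. A worked check against the 100000/0.001 entry, using values copied from this file's stats block below:

W = 126.17                      # load power, the [126.17] sample in the log
elapsed = 15.071763277053833    # wall time of the measured rerun
W_idle = 714.92 / 20            # printed total / 20 samples = 35.746
iters = 4257

J = W * elapsed                 # 1901.60...  -> "J"
W_D = W - W_idle                # 90.424      -> "W_D", idle-subtracted power
J_D = W_D * elapsed             # 1362.849... -> "J_D"
kiter = iters / 1000
print(J / kiter, W / kiter, W_D / kiter)   # J_1KI, W_1KI, W_D_1KI
# Caveat: the logged J_D_1KI matches W_D_1KI / kiter (4.9897...) rather than
# J_D / kiter, which looks like a double division somewhere in the harness.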
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 96, 213, ..., 9999811, + 9999904, 10000000]), + col_indices=tensor([ 561, 663, 1931, ..., 97741, 99513, 99851]), + values=tensor([0.7974, 0.7905, 0.8203, ..., 0.5966, 0.6231, 0.2009]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4967, 0.7208, 0.9275, ..., 0.8267, 0.3582, 0.8531]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 11.433454513549805 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 96, 213, ..., 9999811, + 9999904, 10000000]), + col_indices=tensor([ 561, 663, 1931, ..., 97741, 99513, 99851]), + values=tensor([0.7974, 0.7905, 0.8203, ..., 0.5966, 0.6231, 0.2009]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4967, 0.7208, 0.9275, ..., 0.8267, 0.3582, 0.8531]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 11.433454513549805 seconds + +[41.19, 39.18, 40.43, 39.7, 39.84, 39.19, 40.16, 39.17, 40.24, 39.08] +[126.17] +15.071763277053833 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.433454513549805, 'TIME_S_1KI': 2.685800919321072, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1901.6043726658822, 'W': 126.17} +[41.19, 39.18, 40.43, 39.7, 39.84, 39.19, 40.16, 39.17, 40.24, 39.08, 41.18, 39.54, 39.33, 39.89, 39.75, 39.57, 39.78, 39.56, 39.24, 39.25] +714.9200000000001 +35.746 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.433454513549805, 'TIME_S_1KI': 2.685800919321072, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1901.6043726658822, 'W': 126.17, 'J_1KI': 446.7005808470477, 'W_1KI': 29.638242894056848, 'W_D': 90.424, 'J_D': 1362.8491225643158, 'W_D_1KI': 21.241249706365988, 'J_D_1KI': 4.989722740513504} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json index 9f14449..382c4a4 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102925, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.207890748977661, "TIME_S_1KI": 0.10889376486740501, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1545.566165971756, "W": 113.81, "J_1KI": 15.016431051462288, "W_1KI": 
1.1057566188972554, "W_D": 78.15, "J_D": 1061.2951047420502, "W_D_1KI": 0.7592907456886082, "J_D_1KI": 0.007377126506568941} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 103292, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.412234544754028, "TIME_S_1KI": 0.10080388166318813, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1494.9555576324462, "W": 114.72, "J_1KI": 14.473101088491328, "W_1KI": 1.1106378035085003, "W_D": 77.68374999999999, "J_D": 1012.3235163897275, "W_D_1KI": 0.7520790574294233, "J_D_1KI": 0.0072810968654825475} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output index 73316f3..a8441fc 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1461803913116455} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.13699960708618164} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 99996, 99998, +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99999, 99999, 100000]), - col_indices=tensor([53462, 64739, 8211, ..., 77032, 12066, 66338]), - values=tensor([0.7526, 0.8412, 0.0484, ..., 0.1652, 0.9362, 0.7970]), + col_indices=tensor([ 8916, 68486, 49297, ..., 83214, 51117, 46502]), + values=tensor([0.0565, 0.4187, 0.1663, ..., 0.8089, 0.3832, 0.9501]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2578, 0.3705, 0.8367, ..., 0.6623, 0.7950, 0.3656]) +tensor([0.6605, 0.5566, 0.3055, ..., 0.1791, 0.1309, 0.6380]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.1461803913116455 seconds +Time: 0.13699960708618164 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '71829', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.327654123306274} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '76642', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.790924310684204} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 99999, 99999, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99997, 99998, 100000]), - col_indices=tensor([53445, 61427, 55256, ..., 99710, 79743, 76910]), - values=tensor([0.2043, 0.7921, 0.3637, ..., 0.3183, 0.9272, 0.3273]), + col_indices=tensor([17249, 94297, 21433, ..., 88389, 79911, 81112]), + values=tensor([0.0934, 0.2541, 0.4263, ..., 0.3405, 0.2702, 0.1947]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6989, 0.9157, 0.2952, ..., 0.1186, 0.5845, 0.8882]) +tensor([0.1521, 0.7703, 0.8999, ..., 0.0235, 0.4756, 0.0049]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 7.327654123306274 seconds +Time: 7.790924310684204 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102925', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.207890748977661} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '103292', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.412234544754028} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99998, 99999, 100000]), - col_indices=tensor([13249, 39443, 49972, ..., 18781, 78628, 93775]), - values=tensor([0.7488, 0.1329, 0.0380, ..., 0.8918, 0.6119, 0.7720]), + col_indices=tensor([40816, 84426, 84611, ..., 44515, 10095, 58427]), + values=tensor([0.3036, 0.7331, 0.5691, ..., 0.0050, 0.0920, 0.5982]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5554, 0.6245, 0.8914, ..., 0.6605, 0.7651, 0.7091]) +tensor([0.8398, 0.7355, 0.2034, ..., 0.0172, 0.0859, 0.7739]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 11.207890748977661 seconds +Time: 10.412234544754028 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99998, 99999, 100000]), - col_indices=tensor([13249, 39443, 49972, ..., 18781, 78628, 93775]), - values=tensor([0.7488, 0.1329, 0.0380, ..., 0.8918, 0.6119, 0.7720]), + col_indices=tensor([40816, 84426, 84611, ..., 44515, 10095, 58427]), + values=tensor([0.3036, 0.7331, 0.5691, ..., 0.0050, 0.0920, 0.5982]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5554, 0.6245, 0.8914, ..., 0.6605, 0.7651, 0.7091]) +tensor([0.8398, 0.7355, 0.2034, ..., 0.0172, 0.0859, 0.7739]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 11.207890748977661 seconds +Time: 10.412234544754028 seconds -[41.24, 39.24, 40.12, 39.1, 40.04, 39.21, 39.63, 39.07, 40.14, 39.14] -[113.81] -13.580231666564941 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.207890748977661, 'TIME_S_1KI': 0.10889376486740501, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1545.566165971756, 'W': 113.81} -[41.24, 39.24, 40.12, 39.1, 40.04, 39.21, 39.63, 39.07, 40.14, 39.14, 41.44, 39.12, 40.01, 39.13, 39.64, 39.12, 40.27, 39.05, 39.93, 38.94] -713.2 -35.660000000000004 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.207890748977661, 'TIME_S_1KI': 0.10889376486740501, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1545.566165971756, 'W': 113.81, 'J_1KI': 15.016431051462288, 'W_1KI': 1.1057566188972554, 'W_D': 78.15, 'J_D': 1061.2951047420502, 'W_D_1KI': 0.7592907456886082, 'J_D_1KI': 0.007377126506568941} +[39.92, 39.29, 39.24, 40.21, 39.35, 45.54, 40.38, 40.14, 39.43, 39.27] +[114.72] +13.031342029571533 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 103292, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.412234544754028, 'TIME_S_1KI': 0.10080388166318813, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1494.9555576324462, 'W': 114.72} +[39.92, 39.29, 39.24, 40.21, 39.35, 45.54, 40.38, 40.14, 39.43, 39.27, 39.96, 39.14, 45.44, 43.38, 51.52, 39.13, 40.45, 39.57, 39.41, 39.06] +740.7250000000001 +37.03625000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 103292, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.412234544754028, 'TIME_S_1KI': 0.10080388166318813, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1494.9555576324462, 'W': 114.72, 'J_1KI': 14.473101088491328, 'W_1KI': 1.1106378035085003, 'W_D': 77.68374999999999, 'J_D': 1012.3235163897275, 'W_D_1KI': 0.7520790574294233, 'J_D_1KI': 0.0072810968654825475} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json index 5c9115a..2ba417a 100644 
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 289350, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.562561988830566, "TIME_S_1KI": 0.036504447861864756, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1275.0872325897217, "W": 96.48, "J_1KI": 4.406729678900023, "W_1KI": 0.3334370139968896, "W_D": 61.3225, "J_D": 810.4429604113102, "W_D_1KI": 0.21193191636426473, "J_D_1KI": 0.0007324413905797986} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 289765, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.56649661064148, "TIME_S_1KI": 0.03646574503698334, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1246.5325114130974, "W": 97.77, "J_1KI": 4.3018739717118955, "W_1KI": 0.33741135057719185, "W_D": 62.23799999999999, "J_D": 793.5122271180152, "W_D_1KI": 0.21478784532293407, "J_D_1KI": 0.0007412484093073148} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output index 1a5d3e8..4b8f527 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.052317142486572266} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.053604841232299805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
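The matrix bookkeeping fields are tied together by fixed arithmetic, which makes any entry easy to sanity-check; for the 10000/0.0001 file above:

rows = 10000
density = 0.0001
size = rows * rows             # 100000000, "MATRIX_SIZE" (dense element count)
nnz = int(size * density)      # 10000,     "MATRIX_NNZ"
assert size == 100_000_000 and nnz == 10_000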
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 9999, 10000, 10000]), - col_indices=tensor([1023, 5622, 6334, ..., 8476, 7727, 1588]), - values=tensor([0.9992, 0.3273, 0.0949, ..., 0.9070, 0.7782, 0.9129]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 10000, 10000, 10000]), + col_indices=tensor([4181, 1858, 2276, ..., 2485, 7240, 8510]), + values=tensor([0.9106, 0.2407, 0.2677, ..., 0.1883, 0.5204, 0.9919]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.3589, 0.4614, 0.1782, ..., 0.3543, 0.5532, 0.1489]) +tensor([0.4673, 0.8867, 0.2183, ..., 0.9392, 0.5032, 0.8250]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.052317142486572266 seconds +Time: 0.053604841232299805 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '200699', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.282996416091919} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '195877', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.097846031188965} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 9998, 10000, 10000]), - col_indices=tensor([5654, 2010, 6092, ..., 8357, 4618, 8765]), - values=tensor([0.6548, 0.7548, 0.4241, ..., 0.2252, 0.7987, 0.4358]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9998, 10000]), + col_indices=tensor([6113, 1564, 232, ..., 3255, 2043, 9640]), + values=tensor([0.7859, 0.4083, 0.3727, ..., 0.9664, 0.2618, 0.1646]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1881, 0.6615, 0.7402, ..., 0.4130, 0.3712, 0.1085]) +tensor([0.1848, 0.2081, 0.2382, ..., 0.7788, 0.6054, 0.6678]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 7.282996416091919 seconds +Time: 7.097846031188965 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '289350', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.562561988830566} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '289765', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.56649661064148} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 3, ..., 9997, 9999, 10000]), - col_indices=tensor([ 41, 4057, 4525, ..., 395, 6429, 4913]), - values=tensor([0.6795, 0.3093, 0.3215, ..., 0.9868, 0.7022, 0.9945]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 10000, 10000, 10000]), + col_indices=tensor([4848, 1770, 22, ..., 2903, 374, 1123]), + values=tensor([0.4832, 0.4922, 0.1673, ..., 0.2881, 0.3225, 0.2417]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.0893, 0.5810, 0.8251, ..., 0.0535, 0.5355, 0.1364]) +tensor([0.7933, 0.2380, 0.0639, ..., 0.5554, 0.1913, 0.9685]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.562561988830566 seconds +Time: 10.56649661064148 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 3, ..., 9997, 9999, 10000]), - col_indices=tensor([ 41, 4057, 4525, ..., 395, 6429, 4913]), - values=tensor([0.6795, 0.3093, 0.3215, ..., 0.9868, 0.7022, 0.9945]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 10000, 10000, 10000]), + col_indices=tensor([4848, 1770, 22, ..., 2903, 374, 1123]), + values=tensor([0.4832, 0.4922, 0.1673, ..., 0.2881, 0.3225, 0.2417]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.0893, 0.5810, 0.8251, ..., 0.0535, 0.5355, 0.1364]) +tensor([0.7933, 0.2380, 0.0639, ..., 0.5554, 0.1913, 0.9685]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.562561988830566 seconds +Time: 10.56649661064148 seconds -[40.18, 38.78, 39.39, 38.64, 39.47, 38.59, 40.17, 38.45, 39.45, 38.53] -[96.48] -13.216078281402588 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289350, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.562561988830566, 'TIME_S_1KI': 0.036504447861864756, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1275.0872325897217, 'W': 96.48} -[40.18, 38.78, 39.39, 38.64, 39.47, 38.59, 40.17, 38.45, 39.45, 38.53, 39.33, 38.72, 38.74, 39.53, 38.66, 39.45, 38.52, 39.57, 38.65, 38.7] -703.1500000000001 -35.157500000000006 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289350, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.562561988830566, 'TIME_S_1KI': 0.036504447861864756, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1275.0872325897217, 'W': 96.48, 'J_1KI': 4.406729678900023, 'W_1KI': 0.3334370139968896, 'W_D': 61.3225, 'J_D': 810.4429604113102, 'W_D_1KI': 0.21193191636426473, 'J_D_1KI': 0.0007324413905797986} +[39.58, 39.81, 39.03, 39.86, 38.88, 39.9, 39.07, 38.97, 41.59, 39.84] +[97.77] +12.749642133712769 
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289765, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.56649661064148, 'TIME_S_1KI': 0.03646574503698334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1246.5325114130974, 'W': 97.77} +[39.58, 39.81, 39.03, 39.86, 38.88, 39.9, 39.07, 38.97, 41.59, 39.84, 40.25, 39.66, 38.91, 39.84, 38.8, 39.81, 38.94, 39.0, 38.89, 39.69] +710.6400000000001 +35.532000000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289765, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.56649661064148, 'TIME_S_1KI': 0.03646574503698334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1246.5325114130974, 'W': 97.77, 'J_1KI': 4.3018739717118955, 'W_1KI': 0.33741135057719185, 'W_D': 62.23799999999999, 'J_D': 793.5122271180152, 'W_D_1KI': 0.21478784532293407, 'J_D_1KI': 0.0007412484093073148} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json index b5102bb..463b3c7 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 187965, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.065882205963135, "TIME_S_1KI": 0.053551896395409436, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1344.2036644935608, "W": 106.8, "J_1KI": 7.1513508604982885, "W_1KI": 0.568190886601229, "W_D": 70.53074999999998, "J_D": 887.7124776168464, "W_D_1KI": 0.3752334211156331, "J_D_1KI": 0.001996294103240673} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 132694, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.224570512771606, "TIME_S_1KI": 0.07705375158463537, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1067.35663690567, "W": 103.59999999999998, "J_1KI": 8.043744531822615, "W_1KI": 0.7807436658778842, "W_D": 68.22474999999997, "J_D": 702.8971014838812, "W_D_1KI": 0.5141509789440364, "J_D_1KI": 0.003874711584126158} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output index 81bf5e6..f18d35d 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07673120498657227} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 
10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07912898063659668} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 16, ..., 99979, 99991, +tensor(crow_indices=tensor([ 0, 9, 15, ..., 99979, 99988, 100000]), - col_indices=tensor([ 168, 470, 1159, ..., 7824, 8386, 8755]), - values=tensor([0.2770, 0.4979, 0.7971, ..., 0.1786, 0.3153, 0.6794]), + col_indices=tensor([ 430, 646, 878, ..., 7983, 8028, 8773]), + values=tensor([0.1249, 0.1009, 0.6404, ..., 0.8347, 0.6604, 0.7086]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8782, 0.5630, 0.5978, ..., 0.9864, 0.4940, 0.0083]) +tensor([0.6668, 0.6238, 0.5068, ..., 0.0173, 0.0134, 0.2844]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.07673120498657227 seconds +Time: 0.07912898063659668 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '136841', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.644104957580566} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '132694', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.224570512771606} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 23, ..., 99978, 99990, +tensor(crow_indices=tensor([ 0, 11, 19, ..., 99972, 99988, 100000]), - col_indices=tensor([1562, 4109, 4242, ..., 5789, 5816, 7878]), - values=tensor([0.3397, 0.5295, 0.0107, ..., 0.2250, 0.1834, 0.1775]), + col_indices=tensor([ 681, 2736, 3433, ..., 9108, 9366, 9692]), + values=tensor([0.5511, 0.6516, 0.1231, ..., 0.0939, 0.8699, 0.6381]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8704, 0.9073, 0.5102, ..., 0.5120, 0.6818, 0.6416]) +tensor([0.2594, 0.0089, 0.5427, ..., 0.9106, 0.5838, 0.6290]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 7.644104957580566 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '187965', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.065882205963135} +Time: 10.224570512771606 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 25, ..., 99981, 99991, +tensor(crow_indices=tensor([ 0, 11, 19, ..., 99972, 99988, 100000]), - col_indices=tensor([ 564, 1289, 1589, ..., 8514, 9743, 9976]), - values=tensor([0.9535, 0.4673, 0.4047, ..., 0.1356, 0.2907, 0.4698]), + col_indices=tensor([ 681, 2736, 3433, ..., 9108, 9366, 9692]), + values=tensor([0.5511, 0.6516, 0.1231, ..., 0.0939, 0.8699, 0.6381]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6755, 0.5642, 0.0135, ..., 0.9982, 0.6342, 0.7704]) +tensor([0.2594, 0.0089, 0.5427, ..., 0.9106, 0.5838, 0.6290]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,30 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.065882205963135 seconds +Time: 10.224570512771606 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 25, ..., 99981, 99991, - 100000]), - col_indices=tensor([ 564, 1289, 1589, ..., 8514, 9743, 9976]), - values=tensor([0.9535, 0.4673, 0.4047, ..., 0.1356, 0.2907, 0.4698]), - size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6755, 0.5642, 0.0135, ..., 0.9982, 0.6342, 0.7704]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 100000 -Density: 0.001 -Time: 10.065882205963135 seconds - -[45.4, 39.59, 38.94, 38.84, 38.95, 40.73, 38.79, 39.87, 38.85, 39.58] -[106.8] -12.586176633834839 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 187965, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.065882205963135, 'TIME_S_1KI': 0.053551896395409436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1344.2036644935608, 'W': 106.8} -[45.4, 39.59, 38.94, 38.84, 38.95, 40.73, 38.79, 39.87, 38.85, 39.58, 39.31, 39.68, 39.37, 39.04, 45.6, 48.05, 38.82, 39.58, 38.6, 39.88] -725.3850000000001 -36.26925000000001 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 187965, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.065882205963135, 'TIME_S_1KI': 0.053551896395409436, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1344.2036644935608, 'W': 106.8, 'J_1KI': 7.1513508604982885, 'W_1KI': 0.568190886601229, 'W_D': 70.53074999999998, 'J_D': 887.7124776168464, 'W_D_1KI': 0.3752334211156331, 'J_D_1KI': 0.001996294103240673} +[39.52, 39.72, 39.2, 38.94, 38.87, 40.37, 39.0, 39.66, 38.95, 39.86] +[103.6] +10.302670240402222 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 132694, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.224570512771606, 'TIME_S_1KI': 0.07705375158463537, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1067.35663690567, 'W': 103.59999999999998} +[39.52, 39.72, 39.2, 38.94, 38.87, 40.37, 39.0, 39.66, 38.95, 39.86, 39.41, 39.62, 39.01, 39.67, 38.86, 38.84, 38.84, 39.8, 38.92, 39.68] +707.505 +35.37525 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 132694, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.224570512771606, 'TIME_S_1KI': 0.07705375158463537, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1067.35663690567, 'W': 103.59999999999998, 'J_1KI': 8.043744531822615, 'W_1KI': 0.7807436658778842, 'W_D': 68.22474999999997, 'J_D': 702.8971014838812, 'W_D_1KI': 0.5141509789440364, 'J_D_1KI': 0.003874711584126158} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json index be0819d..995b9fb 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 105478, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": 
[10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.5971040725708, "TIME_S_1KI": 0.10046743465529115, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1740.769259557724, "W": 131.98, "J_1KI": 16.503624069073396, "W_1KI": 1.2512561861241205, "W_D": 96.29974999999999, "J_D": 1270.159452213168, "W_D_1KI": 0.912984224198411, "J_D_1KI": 0.008655683879087687} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 107069, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.18355131149292, "TIME_S_1KI": 0.10445181435796468, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1763.49504935503, "W": 132.69, "J_1KI": 16.470640889099833, "W_1KI": 1.2392942868617434, "W_D": 96.9815, "J_D": 1288.9169879344702, "W_D_1KI": 0.905785054497567, "J_D_1KI": 0.008459825481675993} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output index 1731635..afaa491 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.16547083854675293} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.1338520050048828} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 96, 207, ..., 999810, - 999906, 1000000]), - col_indices=tensor([ 26, 37, 76, ..., 9653, 9723, 9999]), - values=tensor([0.3241, 0.3803, 0.4811, ..., 0.7106, 0.6386, 0.1440]), +tensor(crow_indices=tensor([ 0, 91, 190, ..., 999794, + 999887, 1000000]), + col_indices=tensor([ 40, 344, 548, ..., 9830, 9841, 9960]), + values=tensor([0.4008, 0.1162, 0.8586, ..., 0.0804, 0.9517, 0.8982]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9687, 0.4748, 0.5344, ..., 0.6395, 0.7779, 0.2708]) +tensor([0.6204, 0.8036, 0.5749, ..., 0.0150, 0.4782, 0.5342]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.16547083854675293 seconds +Time: 0.1338520050048828 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '63455', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 6.316709756851196} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '78444', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.085982084274292} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 90, 176, ..., 999794, - 999890, 1000000]), - col_indices=tensor([ 23, 147, 291, ..., 9810, 9851, 9893]), - values=tensor([0.8158, 0.9343, 0.8649, ..., 0.9539, 0.1935, 0.2240]), +tensor(crow_indices=tensor([ 0, 93, 187, ..., 999812, + 999901, 1000000]), + col_indices=tensor([ 276, 302, 470, ..., 9539, 9540, 9930]), + values=tensor([0.4664, 0.1616, 0.7456, ..., 0.5929, 0.0487, 0.3579]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.7787, 0.2300, 0.4854, ..., 0.5355, 0.5696, 0.8377]) +tensor([0.7338, 0.4039, 0.6812, ..., 0.4093, 0.7174, 0.1386]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 6.316709756851196 seconds +Time: 8.085982084274292 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '105478', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.5971040725708} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '101862', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.98931097984314} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 112, 216, ..., 999816, - 999921, 1000000]), - col_indices=tensor([ 50, 64, 228, ..., 9846, 9935, 9998]), - values=tensor([0.2081, 0.8355, 0.6203, ..., 0.0415, 0.1924, 0.6602]), +tensor(crow_indices=tensor([ 0, 97, 208, ..., 999782, + 999887, 1000000]), + col_indices=tensor([ 113, 292, 413, ..., 9756, 9814, 9863]), + values=tensor([0.7037, 0.4902, 0.2249, ..., 0.1343, 0.1681, 0.3653]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3579, 0.4434, 0.7372, ..., 0.2272, 0.7887, 0.7519]) +tensor([0.0898, 0.3365, 0.9954, ..., 0.9623, 0.9055, 0.9870]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.5971040725708 seconds +Time: 9.98931097984314 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '107069', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.18355131149292} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 112, 216, ..., 999816, - 999921, 1000000]), - col_indices=tensor([ 50, 64, 228, ..., 9846, 9935, 9998]), - values=tensor([0.2081, 0.8355, 0.6203, ..., 0.0415, 0.1924, 0.6602]), +tensor(crow_indices=tensor([ 0, 101, 199, ..., 999771, + 999895, 1000000]), + col_indices=tensor([ 30, 45, 94, ..., 9508, 9668, 9839]), + values=tensor([0.9351, 0.0667, 0.7279, ..., 0.8651, 0.3266, 0.8240]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3579, 0.4434, 0.7372, ..., 0.2272, 0.7887, 0.7519]) +tensor([0.5009, 0.1141, 0.1222, ..., 0.6365, 0.9492, 0.1421]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +76,30 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.5971040725708 seconds +Time: 11.18355131149292 seconds -[41.21, 38.88, 39.8, 38.81, 39.92, 39.04, 39.53, 38.73, 39.74, 38.85] -[131.98] -13.18964433670044 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 105478, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.5971040725708, 'TIME_S_1KI': 0.10046743465529115, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1740.769259557724, 'W': 131.98} -[41.21, 38.88, 39.8, 38.81, 39.92, 39.04, 39.53, 38.73, 39.74, 38.85, 39.66, 39.98, 38.99, 40.02, 39.22, 39.28, 39.07, 41.17, 38.93, 45.27] -713.6050000000001 -35.68025000000001 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 105478, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.5971040725708, 'TIME_S_1KI': 
0.10046743465529115, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1740.769259557724, 'W': 131.98, 'J_1KI': 16.503624069073396, 'W_1KI': 1.2512561861241205, 'W_D': 96.29974999999999, 'J_D': 1270.159452213168, 'W_D_1KI': 0.912984224198411, 'J_D_1KI': 0.008655683879087687} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 101, 199, ..., 999771, + 999895, 1000000]), + col_indices=tensor([ 30, 45, 94, ..., 9508, 9668, 9839]), + values=tensor([0.9351, 0.0667, 0.7279, ..., 0.8651, 0.3266, 0.8240]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.5009, 0.1141, 0.1222, ..., 0.6365, 0.9492, 0.1421]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 11.18355131149292 seconds + +[39.9, 40.21, 39.53, 40.02, 39.22, 40.03, 39.19, 40.06, 39.89, 39.6] +[132.69] +13.29033875465393 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 107069, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.18355131149292, 'TIME_S_1KI': 0.10445181435796468, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1763.49504935503, 'W': 132.69} +[39.9, 40.21, 39.53, 40.02, 39.22, 40.03, 39.19, 40.06, 39.89, 39.6, 40.49, 39.14, 39.91, 39.61, 39.91, 39.69, 39.58, 39.1, 39.07, 40.03] +714.1700000000001 +35.7085 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 107069, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.18355131149292, 'TIME_S_1KI': 0.10445181435796468, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1763.49504935503, 'W': 132.69, 'J_1KI': 16.470640889099833, 'W_1KI': 1.2392942868617434, 'W_D': 96.9815, 'J_D': 1288.9169879344702, 'W_D_1KI': 0.905785054497567, 'J_D_1KI': 0.008459825481675993} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json index 2c99a83..05bc7cf 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28261, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.545162916183472, "TIME_S_1KI": 0.37313481179659147, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2163.6921325206754, "W": 147.39, "J_1KI": 76.56106056122131, "W_1KI": 5.2153143908566575, "W_D": 111.76774999999999, "J_D": 1640.7558270204065, "W_D_1KI": 3.9548405930434165, "J_D_1KI": 0.13993986741599437} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28163, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, 
"MATRIX_DENSITY": 0.05, "TIME_S": 10.459697484970093, "TIME_S_1KI": 0.371398554307783, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2122.055966448784, "W": 150.9, "J_1KI": 75.34907383619587, "W_1KI": 5.358093953058979, "W_D": 115.1085, "J_D": 1618.7321352814438, "W_D_1KI": 4.087224372403509, "J_D_1KI": 0.1451274499308848} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output index d58d64d..e823d57 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.4614067077636719} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.4528634548187256} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 523, 1040, ..., 4999055, - 4999519, 5000000]), - col_indices=tensor([ 1, 5, 26, ..., 9948, 9962, 9996]), - values=tensor([0.6869, 0.8475, 0.6936, ..., 0.3132, 0.2618, 0.7215]), +tensor(crow_indices=tensor([ 0, 492, 984, ..., 4999007, + 4999498, 5000000]), + col_indices=tensor([ 17, 26, 49, ..., 9943, 9965, 9968]), + values=tensor([0.3785, 0.7951, 0.2972, ..., 0.3720, 0.7853, 0.1204]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5929, 0.6414, 0.0366, ..., 0.9216, 0.5044, 0.3359]) +tensor([0.5665, 0.1637, 0.5801, ..., 0.5211, 0.8646, 0.6970]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 0.4614067077636719 seconds +Time: 0.4528634548187256 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '22756', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 8.454672574996948} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '23185', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 8.643981695175171} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 518, 995, ..., 4998951, - 4999482, 5000000]), - col_indices=tensor([ 3, 5, 12, ..., 9960, 9985, 9990]), - values=tensor([0.0194, 0.0116, 0.2988, ..., 0.0510, 0.2477, 0.0241]), +tensor(crow_indices=tensor([ 0, 462, 943, ..., 4999021, + 4999500, 5000000]), + col_indices=tensor([ 4, 33, 72, ..., 9956, 9968, 9998]), + values=tensor([0.9717, 0.2077, 0.4481, ..., 0.1268, 0.5535, 0.1753]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.8184, 0.3974, 0.7641, ..., 0.0303, 0.5906, 0.4265]) +tensor([0.9761, 0.2557, 0.3900, ..., 0.3250, 0.2223, 0.7021]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 8.454672574996948 seconds +Time: 8.643981695175171 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28261', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.545162916183472} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28163', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.459697484970093} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 472, 967, ..., 4998984, - 4999479, 5000000]), - col_indices=tensor([ 28, 36, 55, ..., 9923, 9953, 9987]), - values=tensor([0.3537, 0.0932, 0.3681, ..., 0.2268, 0.3044, 0.8997]), +tensor(crow_indices=tensor([ 0, 510, 1022, ..., 4999000, + 4999485, 5000000]), + col_indices=tensor([ 31, 34, 40, ..., 9926, 9941, 9984]), + values=tensor([0.9067, 0.8635, 0.5661, ..., 0.0254, 0.7052, 0.7869]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5459, 0.4301, 0.8105, ..., 0.9349, 0.4459, 0.6946]) +tensor([0.2490, 0.1590, 0.1294, ..., 0.2235, 0.7822, 0.7952]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.545162916183472 seconds +Time: 10.459697484970093 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 472, 967, ..., 4998984, - 4999479, 5000000]), - col_indices=tensor([ 28, 36, 55, ..., 9923, 9953, 9987]), - values=tensor([0.3537, 0.0932, 0.3681, ..., 0.2268, 0.3044, 0.8997]), +tensor(crow_indices=tensor([ 0, 510, 1022, ..., 4999000, + 4999485, 5000000]), + col_indices=tensor([ 31, 34, 40, ..., 9926, 9941, 9984]), + values=tensor([0.9067, 0.8635, 0.5661, ..., 0.0254, 0.7052, 0.7869]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5459, 0.4301, 0.8105, ..., 0.9349, 0.4459, 0.6946]) +tensor([0.2490, 0.1590, 0.1294, ..., 0.2235, 0.7822, 0.7952]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.545162916183472 seconds +Time: 10.459697484970093 seconds -[39.85, 40.19, 39.24, 39.5, 39.23, 40.01, 39.26, 40.01, 39.09, 39.94] -[147.39] -14.680047035217285 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28261, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.545162916183472, 'TIME_S_1KI': 0.37313481179659147, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2163.6921325206754, 'W': 147.39} -[39.85, 40.19, 39.24, 39.5, 39.23, 40.01, 39.26, 40.01, 39.09, 39.94, 39.9, 39.27, 40.14, 39.15, 40.0, 39.07, 39.99, 39.51, 39.37, 39.14] -712.4449999999999 -35.622249999999994 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28261, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.545162916183472, 'TIME_S_1KI': 0.37313481179659147, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2163.6921325206754, 'W': 147.39, 'J_1KI': 76.56106056122131, 'W_1KI': 5.2153143908566575, 'W_D': 111.76774999999999, 'J_D': 1640.7558270204065, 'W_D_1KI': 3.9548405930434165, 'J_D_1KI': 0.13993986741599437} +[40.87, 39.6, 40.38, 39.19, 40.47, 39.26, 39.48, 39.2, 40.2, 39.44] +[150.9] +14.062663793563843 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28163, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.459697484970093, 'TIME_S_1KI': 0.371398554307783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2122.055966448784, 'W': 150.9} +[40.87, 39.6, 40.38, 39.19, 40.47, 39.26, 39.48, 39.2, 40.2, 39.44, 41.45, 39.66, 39.99, 39.31, 39.56, 39.24, 40.15, 39.52, 39.84, 39.8] +715.8299999999999 +35.7915 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28163, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.459697484970093, 'TIME_S_1KI': 0.371398554307783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2122.055966448784, 'W': 150.9, 'J_1KI': 75.34907383619587, 'W_1KI': 5.358093953058979, 'W_D': 115.1085, 'J_D': 1618.7321352814438, 'W_D_1KI': 4.087224372403509, 'J_D_1KI': 0.1451274499308848} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..8638631 --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 5238, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.192334175109863, "TIME_S_1KI": 2.1367571926517495, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2038.619791316986, "W": 124.02, "J_1KI": 389.1981273991955, "W_1KI": 23.676975945017183, "W_D": 88.21424999999999, "J_D": 1450.0509266746044, "W_D_1KI": 16.841208476517753, "J_D_1KI": 3.2151982582126295} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..6292d32 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.209188461303711} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 961, 2007, ..., 9997952, + 9998968, 10000000]), + col_indices=tensor([ 14, 18, 26, ..., 9968, 9972, 9997]), + values=tensor([0.9669, 0.3653, 0.3089, ..., 0.5289, 0.5202, 0.9028]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8016, 0.0222, 0.4456, ..., 0.4115, 0.6943, 0.5313]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 2.209188461303711 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4752', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.52530813217163} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 954, 1940, ..., 9998038, + 9998994, 10000000]), + col_indices=tensor([ 0, 3, 4, ..., 9964, 9979, 9998]), + values=tensor([0.5875, 0.0019, 0.5119, ..., 0.4152, 0.5002, 0.2921]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8144, 0.0248, 0.0526, ..., 0.0067, 0.4287, 0.2758]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 9.52530813217163 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5238', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.192334175109863} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1001, 2002, ..., 9997918, + 9998966, 10000000]), + col_indices=tensor([ 9, 21, 97, ..., 9973, 9981, 9990]), + values=tensor([0.6111, 0.6801, 0.6895, ..., 0.1092, 0.3002, 0.2815]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.0277, 0.0823, 0.3111, ..., 0.6513, 0.2238, 0.0558]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 11.192334175109863 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1001, 2002, ..., 9997918, + 9998966, 10000000]), + col_indices=tensor([ 9, 21, 97, ..., 9973, 9981, 9990]), + values=tensor([0.6111, 0.6801, 0.6895, ..., 0.1092, 0.3002, 0.2815]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.0277, 0.0823, 0.3111, ..., 0.6513, 0.2238, 0.0558]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 11.192334175109863 seconds + +[41.1, 40.26, 39.38, 39.47, 39.45, 40.18, 39.58, 40.31, 39.46, 40.14] +[124.02] +16.437830924987793 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.192334175109863, 'TIME_S_1KI': 2.1367571926517495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2038.619791316986, 'W': 124.02} +[41.1, 40.26, 39.38, 39.47, 39.45, 40.18, 39.58, 40.31, 39.46, 40.14, 40.09, 40.24, 39.54, 40.08, 39.32, 40.12, 39.35, 39.35, 39.19, 40.34] +716.115 +35.80575 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.192334175109863, 'TIME_S_1KI': 2.1367571926517495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2038.619791316986, 'W': 124.02, 'J_1KI': 389.1981273991955, 'W_1KI': 23.676975945017183, 'W_D': 88.21424999999999, 'J_D': 1450.0509266746044, 'W_D_1KI': 16.841208476517753, 'J_D_1KI': 3.2151982582126295} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json index e9f145d..4449f80 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 352057, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.363084554672241, "TIME_S_1KI": 0.029435814526262056, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1223.9154741740226, "W": 94.69, "J_1KI": 3.4764696460346554, "W_1KI": 0.2689621282917255, "W_D": 59.05925, "J_D": 763.3702605144381, "W_D_1KI": 0.1677547953882468, "J_D_1KI": 0.0004764989629186376} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 362169, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.681929588317871, "TIME_S_1KI": 0.029494323336116207, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1262.685609395504, "W": 96.13, "J_1KI": 3.4864541399056903, "W_1KI": 0.2654285706396737, "W_D": 60.50875, "J_D": 794.7937986841798, "W_D_1KI": 0.1670732448111241, "J_D_1KI": 0.0004613129362566208} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output index f1c1f4f..f681b74 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,266 +1,373 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05632638931274414} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.044791460037231445} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([6812, 5345, 3814, 3851, 1180, 2370, 9747, 9157, 9309, - 1844, 451, 6602, 4443, 8006, 5413, 9948, 6902, 4781, - 5273, 3102, 9193, 6090, 9660, 8598, 9786, 3453, 7823, - 8095, 5864, 1933, 5014, 3401, 1663, 8599, 9714, 5815, - 973, 4504, 306, 2971, 7185, 220, 7724, 5778, 2532, - 0, 8277, 8525, 5899, 2513, 5457, 8721, 2772, 4422, - 997, 2101, 9163, 4690, 3655, 646, 1228, 2676, 5080, - 9204, 4653, 8512, 580, 9554, 3549, 201, 5889, 9262, - 3348, 7948, 7695, 1711, 5747, 7743, 1681, 5808, 2747, - 7029, 7665, 8165, 7858, 569, 2064, 4739, 7568, 177, - 9310, 4386, 8240, 6642, 4389, 3996, 4876, 1054, 4163, - 3621, 8213, 1627, 3052, 4037, 3228, 47, 4120, 8716, - 1140, 654, 1138, 8841, 9286, 6853, 8247, 7250, 6739, - 1808, 169, 5660, 5955, 2424, 5623, 268, 7108, 2287, - 739, 2574, 9748, 9883, 2172, 4242, 8003, 2617, 6886, - 7295, 7725, 4620, 498, 2580, 442, 5852, 4654, 5268, - 6076, 7672, 5783, 3582, 3254, 3994, 929, 1878, 4949, - 400, 6765, 9975, 779, 4319, 980, 2110, 2886, 8932, - 3, 9221, 5560, 5736, 9363, 3301, 2015, 4960, 9665, - 6658, 7513, 8632, 1117, 8631, 3102, 6495, 3285, 5928, - 5063, 2953, 415, 9325, 6645, 1813, 4912, 9756, 1834, - 6588, 7867, 7612, 8434, 3793, 6053, 5323, 8947, 265, - 2804, 6632, 4473, 4355, 2581, 2353, 7271, 4824, 4144, - 6126, 4560, 5442, 4479, 555, 2007, 6423, 5193, 6710, - 6829, 1599, 2342, 3108, 3317, 3816, 713, 4617, 7607, - 6987, 4294, 1833, 4504, 3983, 4882, 6215, 2108, 4859, - 168, 3488, 619, 9439, 3067, 7601, 4742, 6465, 3039, - 9230, 7199, 4541, 3988, 1559, 4055, 8422, 7652, 2090, - 8489, 4261, 7601, 530, 9082, 2933, 9378, 585, 2209, - 353, 9325, 2381, 8704, 6565, 7086, 807, 7854, 8680, - 5552, 8266, 6318, 725, 6560, 2538, 6556, 9098, 566, - 7395, 4316, 1599, 6631, 2981, 4986, 5873, 8559, 6556, - 9629, 7512, 6636, 8262, 8317, 4749, 6563, 215, 5397, - 9055, 3143, 2265, 4227, 1735, 9879, 4387, 9710, 7805, - 9768, 2516, 6943, 7878, 4239, 2682, 5508, 5275, 427, - 5011, 4823, 1510, 3965, 7563, 828, 1786, 3737, 1515, - 269, 2676, 8151, 6784, 6882, 1125, 302, 4605, 1945, - 6282, 2720, 7960, 9965, 9839, 8018, 2892, 8630, 8988, - 8058, 2636, 1587, 7889, 7520, 9635, 2793, 7912, 1905, - 631, 3576, 1928, 6739, 9503, 9116, 9690, 2391, 8077, - 9638, 818, 5239, 8992, 8413, 8946, 9291, 434, 4760, - 600, 2737, 3702, 2860, 2917, 4958, 1280, 1230, 5019, - 5065, 5289, 5197, 4954, 2386, 5251, 792, 
7396, 8666, - 4922, 8408, 7875, 5791, 6075, 4833, 2748, 2161, 7484, - 5145, 7287, 9831, 9820, 3198, 2650, 3260, 5083, 6023, - 8121, 870, 833, 5254, 3774, 8535, 3595, 4463, 1952, - 5991, 1993, 6152, 6091, 5087, 5581, 5515, 3210, 6360, - 2597, 277, 5906, 3480, 439, 7815, 4775, 7398, 6551, - 5541, 9773, 2181, 9106, 3626, 5171, 7202, 6895, 23, - 9676, 9138, 9156, 7516, 1167, 2814, 2329, 7570, 6770, - 7617, 5385, 9773, 9980, 5735, 9957, 7645, 5947, 8994, - 7638, 2770, 6308, 5051, 7910, 6205, 4570, 9838, 3952, - 9132, 6570, 5996, 5683, 6764, 5338, 6161, 136, 3045, - 3151, 6038, 9806, 8969, 2182, 180, 1821, 8019, 320, - 1345, 1010, 2232, 3381, 4659, 4538, 8452, 3930, 6824, - 7217, 1062, 3414, 6613, 6887, 6460, 1275, 344, 8115, - 257, 6494, 9814, 229, 4222, 1492, 9380, 7766, 1297, - 7423, 9312, 6124, 9696, 2437, 9460, 8453, 7101, 9822, - 5976, 7507, 8207, 5249, 6225, 2059, 7406, 1310, 3009, - 606, 4670, 434, 9941, 370, 6664, 1036, 40, 7277, - 3524, 7462, 1087, 3313, 1832, 8546, 5657, 8955, 3723, - 6681, 222, 7634, 737, 4866, 8322, 6756, 1757, 4703, - 8794, 8179, 2766, 2134, 7836, 7352, 1539, 736, 6655, - 1396, 8908, 3976, 2995, 1799, 6890, 978, 3082, 6123, - 8796, 9842, 2267, 6370, 5889, 9113, 5728, 7437, 5990, - 7086, 2210, 9180, 5982, 6894, 8136, 1156, 2679, 7661, - 3787, 217, 3916, 5807, 7734, 7922, 9303, 1765, 2858, - 4216, 8125, 5252, 4569, 5310, 3783, 16, 3025, 4295, - 578, 1594, 1672, 1050, 9822, 2615, 6707, 4468, 6027, - 66, 1328, 8431, 257, 9880, 3598, 9767, 2821, 2303, - 5492, 1720, 3408, 358, 3996, 1997, 6771, 4735, 8558, - 4531, 7842, 6311, 7016, 1679, 779, 2052, 8915, 1176, - 522, 3533, 138, 654, 3578, 1043, 7210, 8571, 276, - 5147, 8019, 2085, 1401, 5462, 9199, 79, 9311, 9605, - 4460, 1609, 4637, 6624, 934, 4056, 799, 7810, 2936, - 3852, 4770, 9470, 816, 3382, 4532, 733, 1595, 7053, - 1819, 8446, 7918, 2571, 2414, 3053, 306, 303, 2633, - 4471, 48, 7302, 9747, 7430, 4452, 4601, 6494, 2373, - 3478, 3671, 723, 7454, 6441, 9325, 5380, 5725, 8477, - 334, 678, 1476, 3512, 8513, 1554, 8955, 7700, 1680, - 6637, 2048, 4501, 208, 7677, 7361, 1726, 3061, 4484, - 6212, 2667, 4315, 5569, 9793, 7424, 1358, 4019, 7298, - 7707, 5875, 8612, 3636, 4969, 6955, 7099, 3862, 675, - 138, 6876, 7840, 3119, 3123, 437, 66, 7091, 9180, - 9545, 1727, 6209, 9333, 9620, 8522, 5749, 403, 8954, - 2903, 6312, 6118, 7115, 4062, 4160, 9841, 9152, 1571, - 890, 7281, 7826, 6454, 2498, 2667, 3964, 2183, 2545, - 1284, 6164, 2551, 8365, 7699, 152, 4082, 7241, 6902, - 9468, 1940, 3235, 5919, 7405, 2523, 7892, 4383, 4211, - 6857, 3014, 1436, 1295, 1132, 3473, 4321, 8486, 3395, - 6627, 7213, 9218, 3796, 1200, 414, 7109, 1218, 4318, - 6987, 1038, 6709, 1934, 1437, 3938, 6293, 1025, 3363, - 1011, 133, 8158, 2737, 6168, 9466, 8847, 1359, 9492, - 7373, 9376, 2753, 5234, 5071, 7674, 2438, 8408, 7454, - 4445, 6836, 1319, 1378, 8765, 9038, 326, 9536, 6715, - 3944, 4572, 4151, 3843, 604, 6969, 7925, 5459, 960, - 6925, 1149, 5386, 2457, 9589, 6754, 7120, 9840, 180, - 9588, 5227, 5482, 1931, 6657, 5746, 1036, 1419, 9953, - 1811, 5316, 2608, 4058, 6786, 2888, 1007, 7074, 8740, - 3707, 6549, 26, 7217, 949, 9048, 6325, 6499, 2844, - 7971, 1510, 4139, 8131, 1772, 6475, 9754, 3482, 3690, - 4507, 6256, 5350, 1093, 3151, 4904, 9667, 8010, 2247, - 9532, 8943, 5825, 3800, 4476, 2175, 7472, 983, 6432, - 9871, 743, 2957, 6994, 3585, 8225, 9704, 5945, 7595, - 9840, 9738, 7080, 2140, 9449, 7134, 4148, 4201, 2192, - 6737, 7193, 5551, 1830, 6330, 7422, 940, 2419, 6922, - 4018, 9800, 6974, 6399, 2872, 9185, 6325, 4933, 8102, - 8215, 7611, 7799, 8985, 
3813, 1167, 1876, 2700, 7140, - 7771, 9155, 8383, 2171, 4826, 8725, 198, 5196, 9408, - 1796, 8747, 3124, 9094, 9219, 2538, 9372, 4142, 4310, - 4925, 362, 482, 6434, 5850, 7726, 9623, 7875, 7573, - 1683]), - values=tensor([0.9501, 0.0608, 0.4582, 0.8648, 0.4288, 0.3335, 0.2990, - 0.8436, 0.9595, 0.9570, 0.4546, 0.7255, 0.7472, 0.1665, - 0.5384, 0.4888, 0.2254, 0.1459, 0.3427, 0.7837, 0.8071, - 0.3083, 0.0792, 0.9156, 0.9593, 0.7189, 0.7561, 0.5744, - 0.7648, 0.8494, 0.2755, 0.2656, 0.9565, 0.5722, 0.0439, - 0.9058, 0.6108, 0.8943, 0.2506, 0.1078, 0.4612, 0.6801, - 0.2129, 0.5436, 0.7432, 0.2689, 0.4989, 0.4374, 0.9920, - 0.6987, 0.6555, 0.1657, 0.9034, 0.6618, 0.5497, 0.4722, - 0.4870, 0.3130, 0.6841, 0.0160, 0.0667, 0.1192, 0.6321, - 0.4472, 0.3518, 0.0174, 0.6075, 0.4096, 0.2354, 0.9944, - 0.8872, 0.2447, 0.1414, 0.1605, 0.9779, 0.0801, 0.5806, - 0.3669, 0.8238, 0.4501, 0.1242, 0.6765, 0.2581, 0.2555, - 0.4602, 0.6776, 0.5934, 0.8778, 0.6683, 0.6033, 0.0971, - 0.6110, 0.9804, 0.0413, 0.6737, 0.0124, 0.0974, 0.9284, - 0.8495, 0.6575, 0.0455, 0.3926, 0.1639, 0.9218, 0.1724, - 0.4834, 0.4166, 0.4979, 0.8689, 0.5628, 0.7445, 0.3148, - 0.3835, 0.4200, 0.3005, 0.7375, 0.8154, 0.8606, 0.8039, - 0.3336, 0.7976, 0.3820, 0.0327, 0.4339, 0.8626, 0.2111, - 0.9953, 0.8857, 0.6352, 0.7180, 0.3906, 0.1555, 0.5752, - 0.5025, 0.1237, 0.5353, 0.4418, 0.1988, 0.3485, 0.0728, - 0.7377, 0.3950, 0.3193, 0.6902, 0.7110, 0.4650, 0.7603, - 0.2230, 0.8474, 0.2803, 0.4197, 0.2239, 0.1180, 0.9181, - 0.2889, 0.4630, 0.2764, 0.3214, 0.0669, 0.6944, 0.8943, - 0.2833, 0.1402, 0.9137, 0.7784, 0.7725, 0.6923, 0.9892, - 0.0023, 0.5435, 0.0759, 0.9832, 0.3443, 0.5451, 0.3963, - 0.8994, 0.8230, 0.6172, 0.6757, 0.7087, 0.2645, 0.4790, - 0.2866, 0.9954, 0.7031, 0.4779, 0.8509, 0.2411, 0.9041, - 0.3087, 0.6675, 0.1882, 0.5192, 0.8350, 0.0060, 0.4196, - 0.9462, 0.4085, 0.2819, 0.6113, 0.3422, 0.4358, 0.8406, - 0.6092, 0.5703, 0.4865, 0.5577, 0.1766, 0.5094, 0.2001, - 0.9448, 0.8781, 0.0070, 0.3887, 0.5298, 0.9320, 0.4934, - 0.9483, 0.7212, 0.3137, 0.0083, 0.0885, 0.8131, 0.5107, - 0.4082, 0.6527, 0.6567, 0.2803, 0.1468, 0.8474, 0.7485, - 0.3926, 0.0191, 0.2248, 0.4033, 0.8399, 0.8619, 0.8824, - 0.0033, 0.8254, 0.8480, 0.2757, 0.7649, 0.7908, 0.0886, - 0.9820, 0.9088, 0.6185, 0.8805, 0.2926, 0.8590, 0.6755, - 0.4517, 0.7710, 0.7408, 0.0482, 0.0098, 0.3668, 0.5847, - 0.2808, 0.0515, 0.2714, 0.4954, 0.8899, 0.4432, 0.5186, - 0.4362, 0.4363, 0.9279, 0.3383, 0.3890, 0.5320, 0.0925, - 0.9077, 0.3611, 0.3517, 0.3097, 0.5473, 0.9561, 0.5761, - 0.7688, 0.1131, 0.4757, 0.3798, 0.4153, 0.0408, 0.1497, - 0.5965, 0.7398, 0.0723, 0.1293, 0.2633, 0.9885, 0.2993, - 0.4042, 0.3044, 0.4369, 0.6707, 0.3228, 0.1104, 0.1811, - 0.7410, 0.5294, 0.4691, 0.0753, 0.1968, 0.2229, 0.0496, - 0.2775, 0.1441, 0.7398, 0.9927, 0.6779, 0.3495, 0.2171, - 0.5931, 0.1314, 0.7589, 0.5741, 0.7003, 0.4931, 0.6617, - 0.0465, 0.8797, 0.9432, 0.2718, 0.6102, 0.5730, 0.7091, - 0.9230, 0.7483, 0.9933, 0.4006, 0.9655, 0.1924, 0.1673, - 0.2167, 0.6767, 0.7325, 0.0967, 0.8262, 0.5445, 0.5750, - 0.8056, 0.7511, 0.2635, 0.4773, 0.0086, 0.4549, 0.1287, - 0.6574, 0.3479, 0.5748, 0.7003, 0.4796, 0.3264, 0.7746, - 0.0992, 0.1274, 0.7083, 0.4710, 0.5040, 0.3106, 0.8949, - 0.5875, 0.8904, 0.1259, 0.9984, 0.1215, 0.1287, 0.7359, - 0.9667, 0.3807, 0.0084, 0.9446, 0.1267, 0.7260, 0.3622, - 0.3648, 0.3971, 0.8243, 0.3290, 0.1302, 0.2602, 0.1594, - 0.2818, 0.9563, 0.5028, 0.0674, 0.7184, 0.7897, 0.3423, - 0.7642, 0.8709, 0.7777, 0.6086, 0.0773, 0.7051, 0.2099, - 0.9218, 0.4964, 0.3211, 
0.8454, 0.9153, 0.7265, 0.8799, - 0.4445, 0.6706, 0.5465, 0.4843, 0.4900, 0.6685, 0.1600, - 0.5873, 0.7582, 0.9589, 0.3086, 0.2822, 0.7154, 0.8235, - 0.4883, 0.4282, 0.2481, 0.3159, 0.8125, 0.4767, 0.5842, - 0.8266, 0.4464, 0.2498, 0.0198, 0.1142, 0.9760, 0.7470, - 0.2814, 0.3180, 0.6628, 0.5393, 0.8968, 0.7089, 0.7283, - 0.5978, 0.9504, 0.7784, 0.4140, 0.0453, 0.3982, 0.7121, - 0.8387, 0.3073, 0.2971, 0.0644, 0.3933, 0.3265, 0.5892, - 0.9306, 0.6032, 0.6663, 0.2458, 0.4183, 0.2601, 0.1482, - 0.7875, 0.6715, 0.9037, 0.6264, 0.1359, 0.3737, 0.8124, - 0.5579, 0.8032, 0.5250, 0.0484, 0.3798, 0.9181, 0.6990, - 0.4709, 0.3607, 0.2263, 0.9491, 0.1381, 0.9349, 0.5587, - 0.9109, 0.9521, 0.5111, 0.0699, 0.3049, 0.9282, 0.2051, - 0.7337, 0.5531, 0.0461, 0.4793, 0.7666, 0.5729, 0.3304, - 0.5870, 0.0631, 0.1734, 0.9830, 0.0209, 0.9070, 0.2915, - 0.5223, 0.8838, 0.3301, 0.9421, 0.7594, 0.3115, 0.3740, - 0.1248, 0.4019, 0.5655, 0.5416, 0.7264, 0.5093, 0.1844, - 0.8141, 0.2982, 0.7382, 0.1835, 0.6278, 0.2811, 0.6067, - 0.4478, 0.8684, 0.7412, 0.6892, 0.4335, 0.3843, 0.1013, - 0.1866, 0.2359, 0.1912, 0.1664, 0.4983, 0.0451, 0.5779, - 0.1237, 0.6607, 0.8235, 0.3279, 0.3369, 0.1857, 0.8737, - 0.2822, 0.5874, 0.7770, 0.0168, 0.7548, 0.3960, 0.3835, - 0.4930, 0.3962, 0.3565, 0.6311, 0.9203, 0.1599, 0.6197, - 0.2004, 0.7715, 0.5383, 0.1841, 0.1164, 0.3781, 0.9012, - 0.9637, 0.8161, 0.0173, 0.2396, 0.0018, 0.0564, 0.8961, - 0.2930, 0.3621, 0.6069, 0.6097, 0.9727, 0.1807, 0.3231, - 0.0985, 0.1124, 0.5749, 0.4352, 0.8638, 0.0775, 0.9958, - 0.9205, 0.3899, 0.5787, 0.8568, 0.9382, 0.5573, 0.3192, - 0.0969, 0.4291, 0.7158, 0.4234, 0.1267, 0.1115, 0.5998, - 0.1213, 0.6955, 0.1359, 0.9242, 0.6837, 0.5104, 0.9705, - 0.6553, 0.7854, 0.7712, 0.8039, 0.8374, 0.2432, 0.9642, - 0.1087, 0.0239, 0.1412, 0.9577, 0.0364, 0.2602, 0.8651, - 0.7740, 0.6630, 0.1925, 0.3806, 0.0865, 0.3012, 0.5681, - 0.5340, 0.7079, 0.9004, 0.2913, 0.0786, 0.7160, 0.5092, - 0.6040, 0.2622, 0.1213, 0.1900, 0.5959, 0.6840, 0.5618, - 0.1670, 0.6118, 0.0943, 0.7556, 0.4326, 0.7941, 0.7922, - 0.3595, 0.6876, 0.3649, 0.5637, 0.9793, 0.2075, 0.6261, - 0.6043, 0.5454, 0.2742, 0.8353, 0.0427, 0.9110, 0.0773, - 0.2275, 0.2893, 0.1109, 0.9398, 0.4773, 0.7822, 0.6599, - 0.8032, 0.2465, 0.6896, 0.2226, 0.7912, 0.1714, 0.2888, - 0.1580, 0.9505, 0.0239, 0.6042, 0.7302, 0.1773, 0.5926, - 0.7722, 0.9857, 0.0965, 0.2226, 0.3385, 0.7909, 0.1570, - 0.6213, 0.2582, 0.1411, 0.9172, 0.4810, 0.6713, 0.9545, - 0.2919, 0.5330, 0.3979, 0.6047, 0.4614, 0.2037, 0.3751, - 0.3804, 0.5338, 0.3579, 0.2743, 0.0279, 0.9402, 0.0896, - 0.2399, 0.2231, 0.8045, 0.3911, 0.5905, 0.3294, 0.4349, - 0.9493, 0.9307, 0.9703, 0.4125, 0.6181, 0.4712, 0.4453, - 0.7656, 0.1164, 0.8896, 0.1964, 0.5785, 0.3681, 0.2471, - 0.4770, 0.5765, 0.6602, 0.0472, 0.6495, 0.2061, 0.8543, - 0.1225, 0.1309, 0.5365, 0.4901, 0.0481, 0.6811, 0.7748, - 0.2448, 0.4898, 0.3279, 0.8044, 0.4603, 0.5675, 0.8174, - 0.0475, 0.7111, 0.2962, 0.6540, 0.1502, 0.9524, 0.6459, - 0.8307, 0.7436, 0.4830, 0.4934, 0.8352, 0.9155, 0.5324, - 0.0716, 0.6763, 0.2247, 0.5415, 0.1357, 0.5307, 0.8078, - 0.8984, 0.3881, 0.5658, 0.5823, 0.2814, 0.5909, 0.5695, - 0.8610, 0.0475, 0.7103, 0.5588, 0.7810, 0.8470, 0.1132, - 0.2900, 0.1215, 0.9552, 0.1865, 0.5326, 0.0328, 0.4792, - 0.8894, 0.2639, 0.9686, 0.4319, 0.0369, 0.9923, 0.0728, - 0.2198, 0.9344, 0.7460, 0.9579, 0.3310, 0.7836, 0.5089, - 0.4401, 0.2425, 0.8688, 0.4712, 0.7084, 0.7189, 0.8725, - 0.1089, 0.5361, 0.8040, 0.9835, 0.0427, 0.8268, 0.6240, - 0.7359, 0.9360, 0.0588, 0.6040, 0.6846, 
0.5540, 0.8911, - 0.2481, 0.3308, 0.1302, 0.3592, 0.4975, 0.4257, 0.7631, - 0.7628, 0.4599, 0.4029, 0.1705, 0.8311, 0.9296, 0.4533, - 0.1348, 0.5456, 0.6201, 0.9648, 0.0545, 0.5193, 0.2471, - 0.2210, 0.2370, 0.3579, 0.2466, 0.5527, 0.6228, 0.6627, - 0.7269, 0.0903, 0.8297, 0.0509, 0.1170, 0.4501, 0.6621, - 0.1975, 0.2451, 0.1701, 0.6420, 0.1514, 0.6671, 0.2545, - 0.2709, 0.8417, 0.6213, 0.5493, 0.8651, 0.2899, 0.0951, - 0.6363, 0.7119, 0.4153, 0.0773, 0.9973, 0.1388, 0.4317, - 0.1078, 0.8822, 0.9970, 0.3666, 0.0927, 0.7775, 0.8304, - 0.2413, 0.5245, 0.6703, 0.8672, 0.4345, 0.0666, 0.8826, - 0.4126, 0.0784, 0.6126, 0.5949, 0.2504, 0.5274, 0.4519, - 0.0299, 0.0111, 0.6969, 0.1507, 0.3909, 0.1643, 0.1775, - 0.3897, 0.2487, 0.8730, 0.9548, 0.3712, 0.6117, 0.5145, - 0.9949, 0.9666, 0.2569, 0.1800, 0.1693, 0.8375, 0.7579, - 0.4954, 0.4402, 0.3809, 0.3091, 0.6158, 0.3376, 0.8918, - 0.9671, 0.0821, 0.0024, 0.4326, 0.7265, 0.2154, 0.3397, - 0.1050, 0.9196, 0.3493, 0.1140, 0.8209, 0.4116, 0.2822, - 0.0376, 0.2910, 0.3166, 0.8309, 0.3734, 0.7911, 0.3928, - 0.4582, 0.8401, 0.4956, 0.4572, 0.7388, 0.5463, 0.4520, - 0.6132, 0.2139, 0.5397, 0.2963, 0.4168, 0.3965, 0.2486, - 0.9508, 0.7409, 0.1439, 0.2506, 0.7678, 0.8150, 0.8213, - 0.8256, 0.1807, 0.2787, 0.4611, 0.6676, 0.3133, 0.4539, - 0.1453, 0.1562, 0.7073, 0.4390, 0.9597, 0.3174, 0.0943, - 0.0190, 0.1803, 0.6325, 0.6661, 0.9783, 0.3584]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([9942, 6806, 8769, 3673, 2619, 2553, 2772, 6991, 9638, + 9629, 9158, 6212, 5182, 5529, 2344, 2346, 122, 7028, + 7511, 9451, 4244, 8815, 1200, 2761, 1166, 6428, 9856, + 2930, 9598, 6209, 16, 6638, 3115, 8422, 341, 3611, + 4039, 5496, 6552, 2918, 7299, 3837, 4809, 8784, 5749, + 9600, 4871, 9986, 6240, 7865, 4521, 404, 5612, 1687, + 5902, 3802, 2584, 2467, 9251, 3413, 7567, 6873, 3539, + 8911, 7564, 7425, 2467, 625, 4370, 372, 8146, 8364, + 5870, 4156, 5185, 5695, 8355, 2444, 2534, 1085, 2679, + 4192, 212, 5765, 9043, 9562, 368, 6724, 3302, 4229, + 1540, 4914, 9319, 7555, 3461, 9031, 1147, 9150, 6690, + 6357, 2415, 7319, 8280, 2601, 5406, 9377, 8412, 2908, + 2289, 9994, 4235, 8030, 4945, 152, 5704, 9454, 8885, + 7225, 8831, 9647, 762, 4585, 7294, 145, 5869, 493, + 6535, 84, 8418, 9444, 2282, 5835, 1126, 9329, 748, + 8059, 2437, 570, 5259, 2869, 6288, 7792, 2863, 6751, + 9752, 5574, 334, 7333, 4012, 5158, 8497, 5025, 6045, + 1734, 2277, 239, 1951, 5145, 6040, 4852, 1447, 7301, + 317, 7241, 5360, 9458, 9704, 7421, 686, 2876, 1216, + 8271, 4137, 6301, 6977, 8699, 9602, 3545, 2095, 5436, + 4475, 3858, 5601, 7872, 1697, 548, 846, 3712, 9584, + 6665, 6252, 4905, 2560, 8014, 4900, 4619, 1257, 3531, + 7648, 9919, 1852, 3326, 4487, 8506, 3919, 4981, 2513, + 7034, 6039, 9368, 7985, 4762, 5739, 2376, 986, 7556, + 1499, 1795, 8163, 3423, 2347, 6873, 6384, 1650, 8313, + 1196, 3938, 4396, 4553, 256, 9625, 9715, 2295, 5530, + 2128, 2920, 6394, 606, 8650, 4056, 9691, 8190, 8835, + 2767, 5109, 2152, 4962, 8309, 5313, 6703, 2055, 9418, + 9662, 829, 6489, 7349, 3629, 1976, 8133, 6494, 5734, + 3252, 9708, 2337, 7894, 9875, 1792, 1256, 340, 3043, + 7104, 8638, 2625, 1402, 3119, 7724, 7377, 4854, 1943, + 3768, 5032, 1038, 6327, 9140, 1764, 4701, 3680, 2758, + 2546, 8118, 1804, 8727, 7673, 8782, 7074, 310, 3245, + 391, 6460, 3188, 8731, 3986, 6485, 8364, 9727, 7173, + 7564, 6534, 1936, 864, 8838, 2305, 9565, 3858, 5712, + 1491, 4027, 3486, 7521, 1061, 9919, 6389, 8021, 6560, + 761, 4944, 9904, 7319, 1344, 4628, 8534, 6548, 8512, + 385, 5328, 1168, 4717, 
5409, 9729, 7942, 6217, 1140, + 6456, 9909, 4138, 5953, 2961, 3523, 366, 7674, 7623, + 5122, 6041, 7023, 8946, 9960, 7313, 1781, 9275, 8786, + 2793, 9858, 4453, 4477, 9647, 1864, 7307, 688, 8021, + 1271, 6194, 1642, 2595, 6785, 3354, 1878, 5043, 398, + 6171, 6178, 8258, 9103, 5415, 9993, 7921, 2009, 5112, + 9130, 5024, 2285, 7569, 928, 3252, 5928, 5366, 3721, + 906, 2764, 261, 6097, 8713, 8271, 9037, 6526, 6608, + 3246, 6379, 2233, 734, 3569, 1130, 4701, 7544, 4759, + 5149, 7792, 8774, 166, 2005, 4199, 4556, 405, 1574, + 4504, 6872, 2121, 9764, 7754, 1920, 849, 8901, 7940, + 2276, 1164, 4120, 3418, 8433, 8534, 2124, 1407, 9941, + 8774, 3610, 6962, 7686, 9032, 2501, 4402, 5592, 7149, + 1724, 2964, 1881, 4166, 5565, 1691, 7145, 7629, 8845, + 5778, 206, 3304, 5476, 570, 233, 913, 8795, 1676, + 6523, 671, 6568, 8802, 6787, 3359, 8229, 5832, 4670, + 477, 3563, 6128, 2958, 8344, 2798, 5123, 7555, 1622, + 9944, 6188, 6831, 7880, 6553, 1015, 6573, 5109, 1750, + 455, 3272, 8257, 9704, 4102, 5542, 791, 2798, 442, + 6524, 2897, 5385, 5704, 4657, 1049, 2358, 3633, 4630, + 5093, 6143, 4158, 3721, 8602, 3915, 5080, 6602, 9642, + 3328, 3344, 5460, 9320, 8841, 3042, 7058, 3244, 8323, + 8239, 1899, 1584, 4159, 2074, 4693, 5541, 1069, 755, + 9961, 9720, 495, 7177, 2223, 5138, 6093, 65, 7686, + 4825, 6890, 9342, 2012, 1306, 5055, 8966, 6745, 742, + 2888, 353, 3930, 6018, 6027, 7409, 9284, 9749, 7080, + 4273, 1569, 4695, 4151, 2688, 6916, 3566, 7902, 3827, + 8197, 4739, 6589, 323, 7630, 5907, 4514, 6979, 3763, + 1884, 8832, 7994, 2246, 3533, 9618, 1720, 7610, 7239, + 638, 9292, 4783, 493, 349, 6931, 9183, 7694, 7367, + 1455, 8291, 5297, 6531, 9027, 3125, 5165, 1747, 2395, + 9976, 8105, 7540, 1664, 1747, 7632, 4682, 3958, 4031, + 9459, 7772, 6428, 3341, 592, 8185, 3222, 7813, 4883, + 1644, 7288, 3169, 5960, 5478, 1407, 4342, 1302, 6549, + 1358, 2842, 1329, 1540, 5662, 713, 3921, 8485, 4113, + 6858, 8797, 8861, 6915, 3146, 4236, 2551, 2825, 849, + 2700, 211, 8951, 6730, 4377, 6981, 9070, 2290, 1419, + 7032, 5090, 514, 785, 7157, 6042, 5604, 826, 1805, + 2110, 9775, 7352, 2136, 6043, 1903, 471, 6462, 3922, + 4650, 2275, 1415, 6685, 619, 1933, 8766, 3982, 3889, + 6078, 9649, 3244, 543, 4606, 5534, 6411, 8037, 3869, + 5767, 2501, 7323, 6010, 9987, 2988, 3771, 2195, 3286, + 4114, 4746, 5123, 9347, 59, 3399, 1134, 9616, 2805, + 6005, 2217, 6562, 4260, 4451, 4510, 6796, 3816, 8731, + 7099, 6120, 8783, 5843, 1286, 6064, 2505, 8992, 1831, + 1649, 2921, 725, 1022, 4968, 1923, 2310, 8148, 7666, + 7235, 3908, 5293, 59, 2659, 3454, 8439, 4164, 5105, + 930, 3399, 9150, 7660, 4670, 8057, 6043, 7343, 200, + 6766, 1610, 5601, 4418, 5607, 7065, 2665, 1090, 2907, + 6313, 259, 3791, 4708, 4342, 8329, 2983, 3117, 4878, + 4741, 5590, 5320, 4437, 4003, 6000, 8321, 6759, 1334, + 2279, 7391, 4187, 1434, 4008, 2700, 6956, 4315, 3611, + 3430, 9868, 5792, 4357, 9903, 8075, 5220, 1738, 163, + 3665, 4478, 702, 5727, 7653, 563, 6002, 2891, 8761, + 7287, 2250, 9470, 9219, 7870, 9501, 6831, 5057, 4220, + 7687, 2932, 5610, 3763, 3667, 3671, 3384, 2786, 4287, + 269, 9210, 2349, 3391, 12, 5994, 653, 2936, 9025, + 9494, 3399, 9419, 4942, 9380, 4459, 7473, 5954, 1447, + 1094, 7179, 3014, 2886, 6512, 7645, 9938, 3613, 9440, + 6699, 7451, 2452, 4865, 9654, 2308, 6980, 6767, 7495, + 1334, 9014, 782, 6309, 7997, 5366, 1284, 7925, 6962, + 3877, 1913, 1050, 9127, 2886, 1116, 6198, 6893, 1633, + 1350, 4166, 753, 3204, 1614, 5868, 245, 204, 1115, + 1068, 2494, 2135, 610, 9686, 9370, 6836, 5359, 4868, + 7140, 5229, 2658, 9723, 5848, 6639, 5836, 9766, 4393, 
+       ...]),
+       values=tensor([...]), size=(10000, 10000), nnz=1000,
+       layout=torch.sparse_csr)
-tensor([0.8769, 0.8902, 0.3074, ..., 0.5063, 0.6820, 0.8149])
+tensor([0.4545, 0.4841, 0.5462, ..., 0.7193, 0.8873, 0.8657])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -268,378 +375,271 @@
 Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 0.05632638931274414 seconds
+Time: 0.044791460037231445 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '186413', '-ss', '10000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.5597083568573}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '234419', '-ss', '10000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.79626727104187}
 
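Each record above follows the same pattern: batch.py invokes spmv.py through Apptainer with an iteration count, a synthetic matrix size (-ss 10000), and a density (-sd 1e-05); spmv.py builds the matrix, converts it to CSR (the to_sparse_csr call the UserWarning points at), multiplies it against a random dense vector for the requested number of iterations, and prints the JSON record with TIME_S. A minimal sketch of that measurement loop, assuming the internals of spmv.py (the random-index construction, the helper name run_spmv, and the use of torch.mv are reconstructions, not the actual source):

import json
import time

import torch


def run_spmv(size: int, density: float, iterations: int) -> dict:
    # Target nonzero count for a size x size matrix at the given density
    # (assumed sampling scheme; duplicate (row, col) draws merged by
    # coalesce() can leave the realized nnz slightly below the target).
    nnz = int(size * size * density)
    indices = torch.randint(0, size, (2, nnz))
    values = torch.rand(nnz)
    matrix = torch.sparse_coo_tensor(indices, values, (size, size)).coalesce()
    # The conversion the UserWarning above is triggered by (spmv.py:75).
    matrix = matrix.to_sparse_csr().type(torch.float32)

    x = torch.rand(size)  # the dense vector printed after the sparse tensor
    start = time.time()
    for _ in range(iterations):
        y = torch.mv(matrix, x)  # CSR sparse matrix-vector product
    elapsed = time.time() - start

    return {"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr",
            "MATRIX_SHAPE": [size, size], "MATRIX_ROWS": size,
            "MATRIX_SIZE": size * size, "MATRIX_NNZ": matrix.values().numel(),
            "MATRIX_DENSITY": density, "TIME_S": elapsed}


# e.g. the second run above: 234419 iterations over a 10000x10000 matrix
print(json.dumps(run_spmv(10000, 1e-05, 234419)))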
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([...]),
-       values=tensor([...]), size=(10000, 10000), nnz=1000,
-       layout=torch.sparse_csr)
+tensor(crow_indices=tensor([  0,   0,   0,  ..., 999, 999, 1000]),
+       col_indices=tensor([...]),
+       values=tensor([...]), size=(10000, 10000), nnz=1000,
+       layout=torch.sparse_csr)
-tensor([0.5451, 0.9325, 0.7234, ..., 0.9278, 0.0652, 0.2905])
+tensor([0.8987, 0.7248, 0.0383, ..., 0.6918, 0.0447, 0.2254])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -647,378 +647,378 @@
 Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 5.5597083568573 seconds
+Time: 6.79626727104187 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '352057', '-ss', '10000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.363084554672241}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '362169', '-ss', '10000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.681929588317871}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([...]),
-       values=tensor([...]), size=(10000, 10000), nnz=1000,
-       layout=torch.sparse_csr)
+       col_indices=tensor([...
2577, 4035, 1431, 7927, 9393, 643, 1226, 3816, + 5933]), + values=tensor([1.9375e-01, 3.5125e-01, 2.0768e-01, 2.6967e-01, + 6.5553e-01, 5.3217e-01, 3.3343e-01, 6.1229e-01, + 8.1898e-01, 5.0937e-01, 7.4774e-01, 5.1207e-01, + 8.0001e-01, 3.5041e-01, 6.7059e-01, 3.7672e-01, + 9.3472e-01, 9.3036e-01, 2.3990e-01, 8.7663e-01, + 4.6996e-01, 5.0217e-02, 8.4756e-01, 7.8975e-01, + 1.1314e-01, 4.4290e-01, 2.0680e-01, 9.2084e-02, + 3.5472e-01, 9.4380e-01, 7.9234e-01, 8.1876e-01, + 2.3511e-01, 7.6869e-01, 4.2133e-02, 1.0330e-01, + 1.7268e-01, 7.6007e-01, 3.8309e-03, 9.1460e-01, + 7.3496e-01, 4.9658e-01, 2.1053e-01, 4.7699e-01, + 5.0002e-01, 8.1406e-01, 5.4606e-01, 4.2159e-01, + 2.6297e-01, 2.4520e-01, 9.6103e-01, 7.7868e-01, + 7.6457e-01, 6.2481e-01, 4.5144e-01, 1.9487e-01, + 3.0538e-01, 3.2455e-02, 6.4771e-01, 8.8797e-01, + 9.3051e-01, 9.7039e-01, 9.7735e-01, 3.0647e-01, + 9.6008e-02, 6.3088e-01, 8.0975e-01, 5.3515e-01, + 8.0786e-01, 2.9740e-01, 2.4833e-01, 2.6441e-01, + 3.2550e-01, 2.1987e-01, 1.3930e-02, 1.7247e-01, + 6.8496e-01, 8.3630e-01, 7.5016e-01, 8.7730e-01, + 2.1744e-02, 6.2953e-01, 2.2457e-01, 2.0815e-01, + 1.6450e-01, 7.3002e-01, 4.9950e-01, 3.0029e-02, + 7.4701e-02, 6.2437e-01, 7.7434e-01, 3.8994e-01, + 1.0852e-01, 7.4616e-01, 4.7554e-01, 7.9127e-01, + 4.2656e-01, 7.7188e-01, 3.6412e-01, 2.5388e-01, + 9.3166e-01, 4.7512e-01, 3.1345e-01, 9.4634e-01, + 4.9501e-01, 2.5802e-01, 2.9731e-01, 4.5345e-01, + 4.5427e-01, 2.7223e-01, 3.0850e-01, 5.9089e-01, + 2.3642e-01, 7.7402e-01, 9.5831e-01, 4.5183e-01, + 4.4635e-01, 3.4103e-01, 1.9752e-01, 4.1907e-01, + 9.4180e-01, 8.3566e-01, 7.5022e-01, 3.4154e-01, + 8.8228e-01, 7.0899e-02, 1.2527e-01, 7.8104e-01, + 6.4557e-01, 3.4866e-01, 2.6732e-01, 6.2958e-01, + 1.1325e-01, 7.9593e-01, 6.9074e-01, 4.4629e-01, + 2.5885e-01, 7.2330e-01, 8.2253e-01, 9.8244e-01, + 7.9844e-01, 4.3702e-01, 9.6241e-01, 7.5657e-01, + 5.3837e-01, 1.8624e-01, 3.8968e-01, 3.4892e-01, + 7.0278e-01, 5.5261e-01, 4.0352e-01, 3.3248e-02, + 2.7925e-01, 1.5770e-01, 4.3930e-01, 1.5049e-01, + 4.8475e-01, 3.8261e-01, 5.0227e-01, 6.2634e-01, + 4.8135e-01, 9.1814e-01, 4.0382e-02, 9.3517e-01, + 5.1252e-01, 3.4347e-01, 6.1408e-01, 4.7550e-01, + 8.2509e-03, 7.1485e-01, 4.8130e-01, 1.2004e-01, + 3.3123e-01, 3.7004e-01, 4.8796e-01, 6.7218e-01, + 5.1642e-01, 2.9873e-01, 6.9024e-01, 1.8738e-01, + 6.7151e-01, 1.0462e-01, 2.4549e-01, 6.9732e-01, + 4.1806e-01, 4.7981e-02, 9.3086e-01, 4.1645e-02, + 3.4142e-01, 4.8380e-01, 4.8247e-01, 8.9969e-01, + 8.6323e-01, 9.7268e-01, 5.6983e-01, 1.3965e-02, + 5.9431e-01, 2.9727e-01, 1.6227e-02, 9.8379e-01, + 7.9038e-01, 8.8241e-01, 5.7714e-01, 3.3784e-02, + 1.0960e-01, 3.1070e-01, 5.3521e-01, 7.0199e-01, + 6.5999e-01, 2.7917e-01, 9.8173e-02, 6.2497e-01, + 9.5182e-01, 6.6789e-02, 1.5393e-02, 6.2254e-01, + 1.0971e-01, 8.8447e-01, 5.4323e-01, 2.1015e-01, + 6.6007e-01, 8.2753e-01, 3.6703e-01, 7.4051e-01, + 6.6966e-01, 9.7913e-01, 4.0712e-01, 3.4707e-01, + 3.8309e-01, 2.3070e-01, 2.2715e-01, 7.7305e-01, + 3.7610e-01, 3.4003e-01, 5.8650e-01, 5.8454e-01, + 3.8920e-01, 2.4417e-01, 2.4715e-02, 1.6482e-01, + 6.8219e-01, 4.0944e-01, 1.2251e-01, 8.3378e-01, + 2.8858e-01, 4.5315e-01, 3.7387e-01, 5.1960e-01, + 6.3346e-01, 5.5499e-02, 4.5719e-01, 8.2993e-01, + 8.1787e-01, 3.3558e-01, 2.6451e-01, 5.5224e-01, + 3.1647e-01, 7.8278e-01, 2.1696e-01, 6.5586e-01, + 2.1644e-01, 9.5937e-01, 7.6861e-01, 8.0010e-01, + 6.6128e-01, 1.2187e-01, 9.4748e-01, 9.0035e-01, + 7.1037e-01, 1.8546e-01, 7.1352e-01, 1.8524e-01, + 7.4925e-01, 2.6708e-01, 7.6244e-01, 4.1247e-01, + 1.4128e-01, 4.9211e-01, 
3.7997e-01, 7.0780e-01, + 3.8386e-02, 6.6816e-01, 6.0148e-01, 9.0383e-01, + 7.2595e-01, 6.8359e-01, 2.9584e-01, 8.2414e-02, + 1.5712e-01, 4.9752e-01, 1.1561e-01, 2.6694e-01, + 7.7841e-01, 1.6152e-01, 9.8172e-01, 1.9818e-01, + 1.4269e-01, 8.9879e-02, 5.0250e-01, 5.7802e-01, + 6.6528e-01, 3.6557e-02, 1.2972e-01, 6.6852e-01, + 4.9575e-01, 8.1221e-01, 6.6098e-01, 8.1005e-02, + 3.3205e-01, 2.1959e-01, 7.2778e-01, 9.9336e-01, + 6.7973e-02, 8.8482e-01, 2.5671e-01, 8.5151e-01, + 1.4303e-01, 9.4333e-01, 1.3749e-01, 3.5676e-01, + 9.2539e-01, 3.5622e-01, 8.1502e-01, 9.8078e-01, + 3.7427e-01, 5.4771e-01, 4.4970e-01, 3.5472e-01, + 6.8737e-01, 8.9441e-01, 4.3924e-01, 9.0130e-01, + 7.3301e-01, 2.3730e-01, 1.4503e-01, 1.6294e-01, + 7.4797e-01, 3.0663e-01, 9.1635e-01, 2.2485e-01, + 8.1593e-01, 6.9641e-01, 5.8876e-01, 2.5397e-01, + 2.7194e-01, 2.3151e-01, 4.6512e-01, 3.3600e-01, + 1.2084e-01, 7.4145e-01, 8.1178e-01, 4.2986e-01, + 3.2717e-01, 8.9582e-01, 3.8058e-03, 1.1514e-01, + 2.6151e-01, 8.6861e-01, 3.9034e-01, 3.7204e-01, + 5.0550e-01, 2.5962e-01, 7.9080e-01, 7.3126e-01, + 1.9304e-01, 2.7548e-01, 4.0413e-01, 6.4024e-01, + 3.0537e-01, 6.4800e-02, 2.9508e-01, 4.4554e-01, + 7.4376e-01, 3.5356e-01, 1.2386e-01, 9.0646e-01, + 4.4257e-01, 9.9428e-01, 3.9815e-01, 1.0579e-01, + 5.3899e-01, 8.8053e-01, 4.1755e-01, 2.6286e-01, + 8.6265e-01, 3.5440e-01, 4.1767e-01, 4.3722e-01, + 2.2701e-01, 2.0119e-01, 2.0168e-01, 7.4481e-01, + 3.1458e-01, 9.1105e-01, 5.1549e-01, 4.9528e-01, + 7.2489e-01, 6.9358e-01, 9.4876e-01, 1.4933e-01, + 1.0757e-01, 7.4461e-01, 3.5457e-01, 2.9405e-01, + 3.7800e-01, 3.5033e-01, 2.4629e-01, 4.0123e-01, + 8.0207e-01, 3.0032e-01, 1.9983e-01, 3.7397e-01, + 1.0202e-01, 9.9367e-01, 7.3515e-01, 8.2278e-01, + 1.1699e-01, 7.4263e-02, 4.8947e-01, 7.3849e-01, + 1.1436e-01, 4.7523e-01, 7.7099e-01, 7.2400e-01, + 6.5547e-02, 5.7163e-01, 3.2211e-01, 6.7657e-01, + 5.1329e-02, 1.2576e-01, 8.5341e-01, 1.3857e-01, + 6.3257e-01, 4.3927e-01, 4.0713e-01, 7.1751e-01, + 8.1961e-01, 5.3252e-01, 3.3088e-01, 5.8053e-01, + 1.4918e-01, 8.0005e-01, 3.7335e-01, 9.6447e-02, + 1.7216e-01, 1.0099e-01, 1.5519e-01, 3.6774e-01, + 8.7242e-01, 4.4847e-01, 5.5891e-01, 3.0216e-01, + 8.5587e-01, 3.0503e-01, 4.5505e-01, 7.7470e-01, + 7.5445e-01, 7.0880e-01, 4.5444e-01, 9.8342e-01, + 9.9777e-01, 7.5503e-01, 9.8966e-01, 4.0026e-01, + 6.9843e-01, 1.9249e-03, 7.8839e-01, 1.3053e-01, + 1.3274e-01, 8.4923e-02, 4.2804e-01, 1.5293e-01, + 9.0170e-01, 4.9992e-01, 7.7254e-01, 7.4648e-01, + 6.2388e-01, 8.5497e-01, 8.2756e-01, 3.8686e-01, + 9.8151e-02, 5.4863e-01, 2.5265e-01, 7.3707e-01, + 2.0151e-01, 5.7024e-01, 2.8365e-01, 3.5556e-01, + 2.8799e-01, 1.3107e-02, 5.3341e-01, 3.5237e-01, + 8.8548e-01, 5.9289e-01, 5.4342e-01, 1.7633e-01, + 6.3070e-01, 5.3768e-01, 2.2342e-01, 3.3247e-01, + 6.9090e-01, 4.1567e-01, 1.1222e-01, 4.7518e-02, + 9.4288e-01, 9.5518e-01, 7.6795e-01, 5.0269e-01, + 3.6134e-01, 6.0356e-01, 8.3473e-01, 9.6104e-01, + 6.3423e-01, 1.6210e-02, 4.0034e-01, 9.7914e-01, + 3.9605e-02, 1.7596e-01, 5.7872e-01, 6.7935e-01, + 4.1868e-01, 8.3560e-01, 4.0556e-01, 4.8967e-02, + 2.2949e-01, 1.0615e-01, 5.7564e-01, 7.4653e-02, + 9.0806e-01, 6.0640e-01, 4.3840e-01, 7.2256e-01, + 2.0422e-01, 4.2972e-01, 5.8503e-01, 7.7506e-01, + 9.7508e-01, 9.4505e-01, 6.4491e-01, 2.1982e-01, + 2.7546e-01, 7.7442e-01, 5.0277e-01, 6.9720e-02, + 7.4204e-01, 6.0831e-01, 1.1658e-01, 5.0890e-01, + 1.1812e-01, 6.0273e-01, 8.2931e-01, 9.8180e-01, + 1.5799e-01, 9.8569e-01, 4.0228e-01, 4.0955e-01, + 4.7497e-02, 8.9661e-02, 4.9936e-01, 1.1148e-01, + 9.0756e-01, 4.0268e-01, 
8.9360e-01, 2.2851e-01, + 2.0956e-01, 1.7882e-01, 2.9301e-01, 8.3386e-01, + 6.4286e-01, 9.9061e-01, 5.6455e-02, 7.0386e-01, + 1.2706e-01, 8.6843e-01, 1.9036e-01, 7.5009e-01, + 9.0547e-01, 3.5391e-02, 3.8624e-01, 1.1931e-01, + 9.4649e-01, 6.8202e-01, 9.5564e-01, 1.1891e-01, + 8.1752e-01, 4.3762e-01, 5.5711e-01, 2.5729e-01, + 3.5776e-01, 4.0330e-01, 2.2345e-01, 8.0837e-01, + 9.1913e-01, 5.1379e-01, 1.0360e-01, 8.8191e-01, + 6.7996e-01, 9.4555e-02, 6.2440e-01, 2.8075e-01, + 4.6693e-01, 5.4747e-01, 4.7497e-01, 8.5070e-02, + 5.6932e-02, 6.4648e-01, 1.2477e-03, 2.0721e-01, + 6.8915e-01, 8.7342e-01, 5.7258e-01, 8.6224e-01, + 5.7939e-01, 9.9515e-01, 2.0583e-01, 2.1518e-01, + 8.9509e-01, 8.0367e-01, 6.4277e-01, 5.0827e-01, + 2.5676e-02, 5.8560e-02, 4.3004e-01, 4.5788e-01, + 6.6471e-01, 3.9649e-01, 2.0012e-01, 4.3487e-01, + 9.0304e-01, 2.2243e-01, 3.0464e-01, 8.5268e-01, + 5.6222e-01, 2.4369e-01, 8.6137e-01, 7.8472e-02, + 9.1993e-02, 6.5703e-01, 2.8403e-01, 8.8040e-01, + 2.8392e-01, 7.7439e-01, 9.3284e-01, 2.0412e-01, + 4.0388e-01, 8.1648e-01, 2.1973e-01, 5.7573e-01, + 9.2892e-02, 7.5088e-01, 3.0786e-01, 1.8024e-01, + 6.5532e-01, 6.1457e-02, 2.1697e-01, 1.0836e-01, + 3.9118e-01, 4.9190e-01, 9.4466e-01, 2.1653e-01, + 2.9599e-01, 7.5911e-01, 4.5813e-03, 2.5706e-01, + 2.5307e-01, 2.9388e-01, 5.5978e-01, 8.8650e-01, + 4.8734e-01, 4.7580e-01, 9.1478e-01, 1.2022e-01, + 8.6153e-01, 4.8555e-01, 8.7075e-01, 8.6253e-01, + 1.4431e-01, 3.4625e-01, 2.9820e-01, 3.7020e-01, + 7.2331e-01, 5.3627e-01, 3.3997e-01, 4.2166e-01, + 3.2381e-01, 3.9761e-01, 7.5745e-01, 6.5813e-01, + 6.4982e-01, 2.8336e-01, 5.7049e-01, 7.2874e-01, + 4.0575e-01, 8.7412e-02, 8.6145e-01, 4.1196e-01, + 8.0483e-01, 1.8391e-01, 5.7371e-02, 8.4034e-01, + 5.2513e-01, 6.0727e-01, 1.5458e-01, 2.0643e-01, + 5.1326e-01, 6.6117e-01, 9.5947e-01, 7.8681e-01, + 6.7698e-01, 9.6081e-01, 4.5145e-01, 6.0458e-01, + 5.2329e-01, 4.8816e-01, 6.4831e-01, 5.4729e-01, + 5.0501e-01, 3.9862e-03, 4.3458e-01, 8.2356e-02, + 4.4194e-01, 4.6046e-01, 8.6119e-01, 7.2362e-01, + 3.3762e-01, 5.7168e-01, 3.7204e-01, 4.6616e-01, + 2.8453e-01, 8.4184e-01, 9.1507e-01, 7.3000e-01, + 2.3497e-01, 4.7039e-01, 3.0436e-01, 7.3392e-01, + 2.0230e-01, 7.5799e-01, 8.0132e-01, 2.6207e-01, + 4.1549e-01, 4.5825e-01, 3.5712e-04, 8.6380e-01, + 2.5643e-01, 1.4106e-01, 4.6769e-01, 1.6319e-01, + 4.2617e-01, 6.8278e-01, 9.9952e-01, 7.2726e-01, + 5.3935e-01, 7.4477e-01, 6.4322e-01, 3.4132e-01, + 9.1885e-01, 8.9713e-01, 7.1047e-01, 6.7622e-01, + 5.4440e-01, 6.6700e-01, 5.7860e-01, 2.7002e-02, + 4.8064e-01, 4.6679e-01, 3.5471e-01, 7.7442e-01, + 1.8786e-01, 7.3381e-01, 5.6379e-01, 1.0770e-01, + 3.8089e-01, 4.8584e-01, 9.5384e-01, 8.6167e-01, + 2.0388e-02, 5.9740e-01, 6.5420e-01, 9.4518e-01, + 6.0007e-01, 9.4668e-01, 2.4456e-01, 1.9964e-01, + 7.2138e-01, 2.1021e-01, 4.1505e-02, 7.3214e-01, + 5.7585e-01, 2.6983e-01, 2.1348e-01, 2.7068e-01, + 6.3100e-02, 4.9250e-01, 2.3708e-01, 6.1793e-01, + 1.5893e-01, 5.2076e-01, 6.7793e-01, 7.4495e-01, + 4.5389e-02, 3.7724e-02, 2.2370e-01, 3.9055e-01, + 1.2615e-01, 3.4002e-01, 3.5834e-01, 7.8782e-02, + 4.9173e-01, 4.1157e-01, 6.6417e-01, 6.4484e-01, + 5.0083e-01, 4.2959e-01, 9.8685e-01, 3.5260e-01, + 4.2792e-02, 1.3207e-01, 9.1311e-01, 9.6641e-01, + 8.9353e-01, 7.3994e-01, 9.4765e-01, 6.5472e-01, + 1.5391e-01, 8.8154e-01, 6.2777e-01, 1.8012e-02, + 7.1259e-01, 6.7197e-01, 7.7261e-01, 4.6110e-01, + 6.2714e-01, 9.6933e-01, 6.1243e-01, 6.0628e-01, + 7.6382e-01, 9.6854e-01, 5.1705e-01, 8.5856e-01, + 3.1705e-01, 7.2350e-01, 9.4389e-02, 2.9574e-01, + 9.1463e-01, 2.0940e-01, 
1.7609e-01, 7.3978e-01, + 9.2575e-01, 6.8811e-01, 1.6098e-01, 6.4482e-02, + 2.5061e-02, 5.2998e-01, 9.8449e-01, 5.4429e-01, + 6.8862e-01, 9.4297e-01, 7.7867e-01, 1.7015e-01, + 7.0622e-01, 3.6348e-01, 3.8057e-01, 7.0215e-01, + 8.9386e-01, 7.3907e-03, 6.8824e-01, 7.3556e-01, + 2.5358e-01, 6.8417e-01, 3.7956e-01, 4.3455e-01, + 3.0873e-01, 4.7848e-01, 8.7651e-01, 5.9179e-01, + 2.4253e-01, 9.6398e-01, 4.9589e-01, 9.5203e-01, + 9.5415e-01, 8.6776e-01, 1.1685e-01, 3.3088e-01, + 7.7341e-01, 8.3175e-01, 5.7462e-01, 1.3990e-01, + 9.0461e-01, 6.0410e-01, 8.4851e-01, 9.6942e-01, + 8.4864e-01, 6.3279e-01, 9.6144e-01, 1.5080e-01, + 8.9336e-02, 9.6933e-01, 4.5647e-01, 7.3363e-01, + 3.9520e-01, 5.6769e-01, 1.2489e-01, 5.1997e-01, + 1.6970e-01, 5.2122e-02, 4.9514e-01, 5.7753e-01, + 3.1179e-01, 8.2135e-01, 3.0969e-01, 1.9110e-01, + 9.3857e-02, 5.3521e-01, 3.5248e-01, 6.2581e-01, + 9.7808e-01, 5.1285e-01, 9.7337e-01, 2.5133e-01, + 4.4027e-01, 4.3065e-01, 2.5723e-01, 1.2854e-01, + 9.8939e-02, 9.0984e-01, 8.7231e-01, 9.3467e-01, + 7.2945e-01, 3.0576e-01, 1.3236e-01, 7.1361e-02, + 3.9339e-01, 3.1714e-01, 3.2872e-01, 5.1748e-01, + 5.5217e-01, 4.1788e-01, 7.8429e-01, 6.7386e-02, + 7.7600e-01, 4.0606e-01, 6.8449e-01, 5.7668e-02, + 9.0049e-01, 8.6218e-01, 3.3053e-01, 7.6311e-01, + 5.8454e-01, 1.8191e-01, 9.8940e-01, 1.1427e-02, + 6.7147e-01, 3.5037e-01, 8.0766e-01, 9.2500e-01, + 1.0255e-01, 9.5627e-01, 4.2546e-02, 1.7540e-01, + 5.4745e-01, 5.9252e-01, 1.4245e-01, 4.0475e-01, + 9.8581e-01, 3.8861e-01, 8.0536e-01, 6.9424e-01, + 6.3616e-01, 7.9450e-01, 3.0102e-01, 4.6604e-01, + 4.0082e-01, 7.9423e-01, 6.0621e-02, 7.6039e-01, + 2.8130e-01, 7.6283e-01, 8.3019e-01, 4.7440e-01, + 9.3373e-01, 2.3127e-01, 9.8599e-01, 1.0451e-01, + 4.4318e-01, 4.3340e-01, 1.2718e-01, 6.7560e-01, + 8.0438e-01, 2.4075e-01, 5.0321e-01, 2.8248e-01, + 6.0269e-01, 1.4597e-01, 1.3511e-01, 1.7491e-01, + 8.6251e-01, 4.5483e-01, 7.5964e-01, 2.8131e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3721, 0.5043, 0.5568, ..., 0.8647, 0.9880, 0.8941]) +tensor([0.4226, 0.0556, 0.1398, ..., 0.5751, 0.9814, 0.4838]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1026,375 +1026,375 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.363084554672241 seconds +Time: 10.681929588317871 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5193, 2755, 2619, 8774, 5321, 6802, 9831, 2285, 7852, - 3999, 9956, 6338, 4303, 3557, 3117, 6782, 5048, 7592, - 3942, 736, 4361, 9482, 6490, 3337, 2778, 8169, 2811, - 209, 2573, 8564, 5262, 8591, 5293, 8927, 3544, 51, - 2528, 4507, 4161, 5578, 9752, 6784, 2306, 938, 2449, - 5328, 718, 7617, 6097, 864, 5625, 9977, 6328, 2206, - 1192, 3645, 3508, 3808, 3742, 5641, 1622, 4352, 9099, - 7155, 1778, 6225, 7403, 1744, 1586, 3123, 5186, 9952, - 4753, 6792, 5057, 2040, 1903, 4935, 4855, 6732, 8949, - 5033, 9687, 8172, 2973, 4285, 3263, 8170, 5631, 2665, - 2030, 1676, 7190, 9261, 1374, 5085, 6991, 7291, 5365, - 8790, 2603, 5128, 4726, 7347, 7445, 5508, 2405, 6862, - 927, 1040, 3233, 8284, 1163, 7143, 7742, 2101, 6504, - 7643, 3848, 7449, 288, 874, 4468, 4224, 1484, 7263, - 4340, 7167, 998, 8159, 8311, 1777, 3799, 3049, 6508, - 9603, 4316, 5196, 2183, 4727, 9055, 4438, 766, 1704, - 2955, 7889, 223, 603, 271, 3214, 3848, 7365, 3352, - 185, 1543, 1100, 7205, 5656, 8224, 7006, 460, 1632, - 5863, 1172, 5235, 1995, 4111, 11, 2411, 7389, 5139, - 7269, 8117, 8911, 1884, 4444, 8936, 4422, 7036, 5702, - 4252, 2130, 6704, 3313, 4857, 9399, 3363, 3509, 5493, - 2340, 4706, 9247, 8227, 7090, 7116, 226, 9056, 6722, - 8438, 5065, 7307, 1220, 7194, 705, 7096, 4679, 547, - 3971, 7573, 5105, 7215, 5944, 5393, 6667, 5458, 8429, - 275, 7814, 1025, 7237, 1604, 2929, 6126, 9253, 4344, - 4508, 267, 2777, 6034, 8761, 1834, 7748, 2674, 1367, - 9944, 6376, 1264, 5363, 2708, 3617, 3700, 5437, 4095, - 3959, 9704, 92, 9660, 9391, 3573, 2946, 813, 5316, - 3935, 7890, 4581, 6296, 7152, 6217, 3415, 6533, 2715, - 1688, 5201, 6175, 7050, 3595, 3662, 6840, 9726, 6036, - 7619, 5416, 6590, 7916, 4522, 6958, 8725, 6668, 2653, - 2628, 2565, 1650, 5630, 3465, 1488, 7827, 8886, 5742, - 4749, 648, 3505, 4220, 7748, 967, 839, 4075, 6270, - 6161, 5175, 1827, 7920, 1324, 2536, 3769, 8225, 5565, - 6328, 4306, 5393, 4486, 1932, 3230, 5293, 7471, 9723, - 8758, 373, 1054, 366, 7030, 2680, 6679, 3052, 3000, - 3826, 7135, 7573, 8624, 2094, 8253, 1679, 9255, 7782, - 2323, 4348, 3499, 8680, 5646, 838, 8871, 3174, 9682, - 8132, 4297, 756, 3636, 7385, 3221, 5036, 4653, 2075, - 2125, 9346, 9627, 6630, 4303, 6472, 8582, 3706, 7450, - 8033, 6481, 218, 2651, 3223, 6130, 1629, 8564, 9132, - 2060, 5147, 6339, 299, 2555, 4613, 3564, 8344, 6671, - 5893, 2906, 3816, 918, 1043, 2354, 6191, 6111, 6428, - 1535, 6205, 4349, 2774, 3014, 6169, 54, 7709, 3177, - 7317, 1857, 4166, 1657, 6193, 5261, 1312, 2305, 7110, - 6570, 2833, 5781, 7269, 8843, 6989, 8890, 1174, 6005, - 811, 1391, 1098, 636, 6355, 1696, 8042, 3865, 3213, - 262, 9122, 2345, 1147, 4236, 7137, 4556, 3780, 9838, - 7492, 7111, 529, 9470, 4720, 4229, 692, 9529, 7086, - 2082, 250, 1543, 289, 7190, 7172, 1560, 4019, 8975, - 1830, 1510, 9707, 2846, 3333, 9679, 8581, 2223, 2379, - 2778, 2110, 6343, 8400, 8819, 9110, 4987, 5901, 207, - 5848, 9965, 6853, 3891, 3084, 4227, 9187, 8352, 2804, - 244, 2773, 5879, 2163, 4413, 2823, 6063, 6812, 7469, - 3631, 6784, 6150, 9291, 1434, 4867, 7937, 8507, 4925, - 9859, 2433, 5664, 7328, 3053, 6754, 2086, 6555, 7886, - 1830, 8651, 582, 9396, 688, 2898, 4397, 1085, 4212, - 9690, 8597, 1017, 8482, 663, 1595, 2722, 495, 807, - 4378, 2814, 7029, 3658, 4499, 8945, 9032, 813, 7181, - 1952, 2910, 2796, 8972, 7959, 3329, 6355, 7208, 1762, - 701, 6311, 4802, 4644, 6623, 713, 2064, 6942, 593, - 2218, 7841, 6420, 5622, 2540, 1104, 8910, 7387, 7602, - 
3882, 8077, 285, 6982, 278, 3095, 1513, 9909, 9182, - 3213, 7417, 6429, 2946, 4607, 9497, 8159, 9486, 5213, - 4740, 9629, 1996, 7823, 9038, 6968, 4206, 9366, 6179, - 4636, 7878, 248, 7209, 1504, 8559, 7776, 3701, 7946, - 1205, 4823, 3452, 2388, 2673, 5380, 9364, 1709, 3782, - 4596, 3539, 3609, 5046, 6692, 2716, 7354, 3756, 2031, - 3021, 2369, 8239, 4900, 649, 2495, 2736, 9221, 9307, - 3110, 2444, 5681, 9962, 658, 5969, 2802, 2529, 4596, - 9750, 5051, 7304, 1985, 1639, 8039, 31, 3676, 2155, - 3903, 9915, 8323, 2043, 9950, 4176, 8037, 391, 3433, - 7851, 4275, 2610, 5720, 6748, 793, 8339, 3679, 3153, - 2125, 5476, 6422, 6927, 8754, 7725, 2141, 1755, 7320, - 2935, 3107, 4817, 3917, 3837, 4030, 6563, 2811, 8883, - 8408, 3509, 6635, 4861, 2432, 7437, 506, 1386, 8922, - 5243, 9151, 2497, 8938, 6978, 2375, 7816, 1133, 9440, - 4511, 353, 8857, 951, 8442, 8719, 9229, 5886, 2784, - 867, 9558, 1353, 1986, 6386, 2976, 6556, 6466, 4952, - 8760, 6705, 3986, 248, 4939, 8572, 8033, 1694, 5483, - 1478, 924, 5915, 1868, 8180, 5092, 7794, 9104, 6732, - 6946, 3391, 3981, 8719, 2292, 9299, 7694, 4217, 8661, - 1105, 5555, 6318, 9071, 5775, 6208, 7153, 6725, 4834, - 8341, 5231, 635, 6491, 6914, 4534, 3780, 1612, 7491, - 9904, 5561, 4379, 7823, 6571, 1041, 5191, 1844, 8301, - 9882, 2613, 3531, 5574, 9708, 5720, 7268, 2236, 2552, - 1791, 6530, 7528, 9653, 5478, 49, 5543, 4607, 4929, - 6779, 4288, 4894, 5953, 9334, 2875, 8996, 4150, 7356, - 1186, 3993, 4505, 9077, 1116, 8449, 5955, 4281, 1244, - 6319, 2334, 7523, 1420, 9819, 1049, 7647, 3112, 8991, - 9729, 3869, 2710, 8813, 9524, 6132, 4591, 1916, 8391, - 5927, 2660, 5428, 5936, 3389, 323, 4780, 5804, 8256, - 6027, 8524, 109, 2252, 2388, 9629, 7006, 6247, 3274, - 8307, 2641, 3198, 7675, 4111, 618, 7105, 3170, 4299, - 5160, 4551, 5987, 4864, 4008, 4510, 1411, 474, 4094, - 7522, 9644, 8491, 401, 8653, 2482, 7935, 6580, 4043, - 1108, 2017, 2207, 9655, 1829, 6993, 5007, 4733, 7370, - 6766, 9337, 7507, 1648, 3705, 2374, 7193, 3309, 2758, - 3114, 8424, 2591, 2146, 3234, 9985, 5568, 712, 7456, - 6725, 1082, 7071, 4477, 2084, 1012, 393, 2646, 9693, - 4629, 6430, 4668, 7348, 4987, 5816, 4309, 6189, 5619, - 6356, 4170, 4871, 4240, 4621, 5427, 3415, 7229, 2801, - 5667, 2772, 2115, 9639, 6777, 5628, 5474, 6929, 7261, - 1128, 7763, 8975, 1875, 6229, 5956, 3894, 8274, 1400, - 3005, 7369, 4914, 2852, 2199, 4664, 5187, 6701, 5141, - 2836, 7341, 9140, 3613, 4273, 2795, 2402, 4117, 7860, - 2778, 346, 8610, 6929, 6113, 1593, 763, 2525, 8935, - 2101, 2835, 1362, 8394, 6460, 4773, 9741, 8111, 1860, - 3451, 7908, 7916, 6010, 8207, 8543, 7760, 8890, 7266, - 1155, 6223, 1146, 9602, 3885, 7243, 31, 7775, 3205, - 5848, 6242, 6442, 2055, 3787, 710, 1978, 8938, 7216, - 5945]), - values=tensor([5.5292e-01, 5.5339e-02, 4.5108e-01, 1.0570e-01, - 3.4688e-01, 1.9198e-01, 9.3821e-01, 9.8353e-01, - 8.8756e-01, 3.1342e-03, 5.5310e-01, 3.0156e-01, - 9.7159e-01, 5.4507e-01, 2.1473e-02, 2.0341e-02, - 8.7216e-01, 9.1887e-01, 3.0364e-02, 9.3932e-01, - 8.2611e-01, 6.7013e-01, 8.8961e-01, 1.2123e-01, - 1.9534e-01, 2.4678e-01, 1.1772e-01, 2.7037e-01, - 3.5509e-03, 2.8075e-01, 4.0535e-02, 6.3427e-01, - 3.9017e-01, 6.1389e-01, 1.0664e-01, 3.2671e-01, - 1.1828e-01, 5.4389e-01, 3.2263e-01, 9.1144e-01, - 7.3488e-02, 2.3373e-02, 9.0950e-01, 8.5203e-01, - 3.4924e-01, 7.3816e-01, 7.5268e-01, 3.6300e-02, - 2.2669e-01, 3.1511e-01, 3.8005e-01, 2.4819e-01, - 3.8328e-01, 5.0635e-01, 5.6645e-01, 1.2111e-01, - 7.6746e-01, 1.4418e-02, 7.3921e-02, 2.7487e-01, - 5.2504e-01, 4.9590e-01, 6.9415e-01, 5.9502e-01, - 1.8332e-01, 
9.2079e-01, 7.3309e-01, 9.5422e-01, - 9.8358e-01, 2.7596e-01, 4.1096e-01, 6.2501e-01, - 5.8528e-01, 1.6744e-01, 3.0802e-02, 9.5302e-01, - 5.9898e-01, 2.6224e-01, 4.3792e-01, 5.1201e-01, - 2.9177e-02, 9.5366e-01, 1.2222e-02, 9.8630e-01, - 9.9589e-01, 8.9232e-01, 6.2139e-02, 7.2973e-01, - 1.7859e-01, 5.8169e-02, 8.3459e-01, 8.2388e-01, - 6.6385e-01, 6.9275e-01, 5.1824e-01, 7.1741e-01, - 6.6236e-01, 1.0698e-01, 8.2657e-01, 4.2895e-01, - 3.6838e-01, 2.7626e-01, 2.9440e-01, 9.3249e-01, - 2.8409e-01, 8.1546e-01, 4.8077e-01, 1.7037e-01, - 1.5884e-01, 7.7664e-01, 1.5836e-02, 3.3557e-01, - 2.0902e-01, 6.0679e-01, 1.5751e-01, 9.8621e-01, - 3.9991e-01, 6.0917e-01, 8.0877e-01, 7.1137e-01, - 2.9923e-01, 9.2558e-01, 3.8125e-01, 6.0657e-01, - 2.9772e-01, 7.1449e-01, 2.2410e-01, 3.3343e-01, - 6.7991e-01, 9.2217e-02, 7.2563e-01, 6.5640e-02, - 1.5008e-02, 2.3401e-01, 3.9745e-01, 1.7414e-02, - 6.1670e-01, 6.7045e-02, 8.9257e-01, 3.1882e-01, - 6.5174e-02, 9.5557e-01, 4.6108e-01, 5.3639e-01, - 2.2493e-01, 8.5831e-01, 3.6582e-01, 2.4755e-01, - 8.9200e-01, 1.4938e-01, 2.5340e-01, 2.1128e-01, - 7.9481e-01, 8.2922e-01, 4.6592e-01, 9.4400e-01, - 8.3401e-01, 9.2113e-01, 7.5652e-01, 3.0972e-01, - 4.2157e-01, 2.9254e-01, 4.9221e-01, 5.8905e-01, - 3.8325e-01, 2.2484e-01, 6.9358e-01, 6.4123e-01, - 6.7816e-01, 6.3084e-01, 9.8574e-01, 4.4427e-01, - 7.1513e-01, 3.9867e-01, 4.3878e-01, 1.9672e-01, - 5.0263e-03, 6.7740e-01, 2.5681e-01, 9.7933e-01, - 4.0394e-01, 4.1879e-02, 3.9553e-01, 7.8503e-01, - 7.4689e-01, 4.9247e-01, 4.1191e-01, 9.9678e-01, - 2.0022e-01, 6.5069e-01, 7.4428e-01, 9.0778e-01, - 5.1895e-01, 8.0675e-01, 8.3865e-01, 6.2747e-01, - 7.3217e-02, 4.9040e-01, 5.2601e-01, 8.7024e-01, - 6.0410e-01, 8.8149e-01, 8.2484e-01, 1.5845e-01, - 4.6403e-01, 8.9306e-01, 6.1336e-01, 8.0234e-01, - 7.9119e-01, 2.0744e-01, 8.8190e-01, 5.9520e-01, - 6.5300e-01, 6.2913e-01, 6.1054e-01, 9.8544e-01, - 4.4616e-01, 9.2657e-01, 4.7958e-01, 1.6243e-01, - 9.3923e-01, 2.8605e-01, 8.8992e-02, 9.4577e-01, - 3.8793e-01, 8.8493e-01, 9.4484e-01, 1.8553e-01, - 6.6811e-02, 2.8498e-01, 7.3602e-01, 5.3860e-01, - 6.3923e-01, 8.8471e-01, 8.9298e-01, 2.3408e-01, - 6.0315e-01, 7.1811e-01, 6.8929e-01, 9.2628e-01, - 3.3735e-01, 9.4330e-01, 2.9212e-01, 1.3474e-01, - 2.4841e-01, 2.9413e-01, 3.0786e-01, 4.4721e-01, - 6.4270e-01, 1.3463e-01, 7.4701e-01, 7.0796e-01, - 7.5288e-01, 4.6654e-01, 5.7533e-01, 2.5732e-01, - 9.4048e-01, 4.2449e-01, 2.7752e-01, 5.3717e-01, - 5.8754e-01, 2.2088e-01, 6.9696e-01, 3.0511e-01, - 5.4146e-01, 6.6670e-01, 1.9863e-01, 8.7473e-01, - 8.3417e-01, 1.7325e-03, 1.1431e-01, 6.6423e-01, - 1.7200e-01, 7.5624e-01, 8.6680e-01, 1.4888e-01, - 5.2062e-01, 2.3949e-01, 4.6781e-01, 8.4834e-01, - 8.3041e-01, 8.4708e-01, 5.0573e-01, 2.6783e-01, - 7.7273e-01, 6.9543e-02, 9.7146e-01, 5.8041e-01, - 8.2714e-02, 7.4425e-01, 7.8576e-01, 7.7661e-04, - 8.3968e-01, 4.5854e-02, 4.3457e-01, 7.8975e-01, - 9.9698e-01, 4.6019e-01, 9.2919e-01, 3.1968e-01, - 9.3109e-01, 9.2620e-01, 9.8663e-01, 5.4487e-01, - 9.8012e-01, 5.4934e-01, 1.7813e-02, 4.7736e-01, - 3.4974e-01, 5.3126e-01, 4.8493e-01, 1.0505e-01, - 2.6504e-01, 5.5712e-01, 9.8635e-01, 6.4827e-01, - 7.6120e-01, 4.2505e-01, 1.6706e-01, 6.8099e-01, - 9.9077e-01, 4.1107e-01, 4.8898e-01, 2.5076e-01, - 3.4645e-01, 3.1250e-01, 6.2990e-01, 6.5617e-02, - 2.3404e-01, 5.0847e-01, 6.3919e-01, 5.7855e-01, - 7.8187e-01, 8.5142e-01, 6.5013e-01, 9.1473e-01, - 3.5578e-01, 6.6380e-01, 4.7582e-01, 8.5910e-01, - 1.0565e-01, 6.2514e-01, 3.8345e-01, 1.5356e-01, - 9.9912e-02, 8.8363e-01, 1.7544e-01, 1.4346e-01, - 3.4208e-01, 
6.1732e-01, 8.8918e-01, 2.9956e-01, - 9.0580e-01, 1.2071e-01, 7.4620e-01, 2.8879e-02, - 6.9099e-01, 3.9454e-01, 8.8898e-01, 8.2851e-01, - 4.6931e-01, 2.9252e-01, 8.3960e-01, 4.1263e-01, - 8.7211e-01, 9.0472e-01, 3.7009e-01, 6.8359e-01, - 9.2110e-01, 4.3054e-01, 5.9179e-01, 8.2038e-01, - 8.5760e-01, 4.2085e-01, 4.0021e-01, 6.0209e-01, - 7.9150e-01, 6.8003e-01, 2.9459e-01, 5.6288e-01, - 9.1978e-02, 2.5679e-01, 8.9665e-01, 3.8079e-01, - 4.8515e-01, 6.6981e-01, 6.6846e-01, 9.7820e-01, - 8.7582e-01, 8.0038e-01, 2.7188e-01, 4.8928e-01, - 5.2560e-01, 9.7220e-01, 3.8673e-01, 9.6365e-01, - 5.5796e-01, 3.5920e-01, 8.7943e-01, 8.6910e-01, - 6.1172e-01, 3.1763e-01, 4.6344e-01, 6.8853e-01, - 9.4884e-02, 8.3766e-01, 2.7469e-01, 1.5268e-01, - 7.2314e-01, 4.7604e-01, 4.7881e-02, 6.2107e-02, - 3.1626e-01, 2.4833e-01, 4.2443e-01, 5.1570e-01, - 7.9326e-01, 9.9465e-01, 8.7703e-01, 9.3805e-01, - 3.8399e-01, 7.9271e-01, 6.6890e-01, 5.1078e-01, - 6.5738e-01, 7.0870e-01, 6.3946e-02, 8.2076e-01, - 5.8328e-01, 9.7339e-01, 1.1501e-01, 1.8801e-01, - 5.1878e-01, 1.6410e-01, 2.5658e-02, 7.4582e-01, - 7.1671e-01, 3.2597e-01, 4.7647e-01, 3.0071e-01, - 2.1942e-01, 3.5837e-01, 2.1099e-01, 7.2833e-01, - 4.5191e-01, 8.3061e-01, 5.3493e-01, 4.6170e-01, - 7.3391e-01, 2.8119e-01, 7.3837e-01, 2.1069e-01, - 6.1466e-01, 3.1738e-01, 6.6976e-01, 4.3234e-01, - 5.3425e-01, 3.1649e-01, 6.9488e-01, 8.3116e-02, - 9.6527e-01, 7.2983e-01, 7.2952e-01, 5.7027e-01, - 5.7248e-01, 9.8809e-01, 5.6845e-01, 9.9233e-01, - 4.3437e-01, 9.1749e-01, 3.0435e-01, 6.1630e-01, - 5.6718e-01, 7.6623e-01, 7.3360e-01, 5.7167e-01, - 3.1448e-01, 2.8390e-01, 3.4381e-01, 3.6832e-01, - 9.5748e-01, 7.9723e-01, 9.3584e-02, 4.2134e-01, - 4.6396e-01, 3.4351e-01, 5.2272e-01, 7.3597e-01, - 4.0274e-01, 8.4091e-01, 1.1289e-01, 7.4283e-01, - 7.3034e-02, 8.8066e-01, 1.0006e-01, 6.3450e-01, - 9.2205e-02, 9.6993e-01, 2.0751e-01, 2.1244e-01, - 5.1143e-02, 3.6481e-01, 6.4697e-01, 2.0789e-01, - 9.5215e-01, 5.3364e-01, 9.2672e-01, 6.9821e-01, - 6.9041e-01, 8.3878e-01, 4.1297e-01, 1.8957e-01, - 2.0550e-01, 3.0226e-02, 6.2550e-01, 7.4896e-01, - 3.7984e-01, 3.6494e-01, 2.2888e-01, 3.1974e-02, - 5.6757e-01, 7.7597e-01, 8.9251e-01, 8.0742e-01, - 5.7001e-01, 6.4631e-01, 6.6361e-01, 9.5532e-01, - 9.6697e-01, 4.2484e-01, 6.2009e-01, 2.2163e-01, - 7.1692e-01, 6.5086e-01, 1.8669e-01, 9.3201e-02, - 8.4567e-01, 7.9722e-01, 5.9999e-01, 4.0199e-01, - 4.2838e-01, 9.1398e-01, 3.5932e-01, 1.2304e-01, - 3.8586e-01, 5.5759e-01, 8.5294e-01, 5.3330e-01, - 6.2723e-01, 9.8234e-01, 1.6861e-02, 5.0691e-01, - 6.1734e-02, 4.7853e-01, 5.6685e-01, 7.9571e-01, - 9.0991e-01, 9.2185e-01, 4.1263e-01, 7.1798e-01, - 6.7303e-01, 8.7499e-01, 6.2012e-01, 3.5399e-01, - 7.6377e-01, 3.3514e-01, 8.4335e-01, 8.8705e-01, - 6.8818e-01, 2.3811e-01, 9.6707e-01, 3.9398e-01, - 2.6073e-01, 8.3096e-01, 1.7007e-01, 3.0433e-01, - 4.0134e-01, 1.3059e-01, 2.4787e-01, 7.8166e-01, - 4.7521e-01, 6.8201e-01, 2.1001e-01, 7.1415e-01, - 3.2262e-01, 3.2235e-01, 4.7378e-01, 8.4827e-01, - 8.4848e-01, 5.2317e-02, 1.5766e-01, 2.2014e-03, - 1.7840e-01, 7.8631e-01, 5.2767e-01, 2.6045e-01, - 8.8588e-01, 4.0016e-01, 5.9530e-01, 2.4974e-01, - 6.6462e-01, 4.7206e-01, 1.3225e-01, 4.5137e-01, - 5.3570e-01, 2.8817e-01, 2.6505e-01, 4.1502e-01, - 6.3949e-03, 4.5409e-01, 5.8227e-01, 3.9940e-01, - 7.6494e-01, 8.8617e-01, 6.5990e-01, 3.0427e-01, - 1.9149e-01, 3.3059e-01, 1.1330e-01, 2.3008e-02, - 7.4098e-01, 2.5809e-01, 3.4906e-01, 6.0979e-01, - 5.8091e-01, 2.5328e-01, 6.2555e-01, 1.6286e-01, - 2.8329e-01, 2.2440e-02, 7.8273e-01, 2.9394e-01, - 9.8468e-01, 
5.5472e-01, 6.7849e-01, 2.4774e-02, - 9.8321e-01, 4.0996e-01, 8.8539e-01, 8.9288e-01, - 5.5255e-01, 3.7365e-01, 1.0507e-01, 7.2419e-01, - 8.6134e-02, 5.7363e-01, 7.8715e-01, 2.3866e-01, - 4.1035e-01, 1.2496e-01, 4.5919e-01, 5.8619e-01, - 6.0808e-01, 4.2723e-01, 6.7449e-01, 9.2084e-01, - 8.9389e-01, 1.3942e-01, 5.4543e-01, 4.7201e-02, - 8.6576e-01, 1.4455e-01, 1.7668e-01, 4.2758e-02, - 1.1920e-01, 2.6815e-02, 7.7244e-01, 5.2841e-01, - 1.8374e-01, 3.0995e-01, 2.6686e-01, 2.2363e-01, - 6.0105e-01, 6.9802e-01, 8.7698e-01, 2.5112e-01, - 9.3430e-01, 4.3135e-01, 1.1205e-02, 9.9203e-01, - 8.3302e-01, 6.5717e-01, 2.3014e-01, 6.2920e-01, - 4.7020e-03, 2.3944e-01, 6.7821e-01, 2.6852e-01, - 4.3645e-01, 2.0642e-01, 6.3645e-01, 8.7126e-01, - 7.8323e-02, 2.0123e-01, 9.8522e-01, 6.6075e-01, - 1.0638e-01, 1.7513e-01, 7.5435e-01, 1.5965e-01, - 5.4974e-01, 8.4304e-01, 3.8315e-01, 5.0593e-01, - 1.8074e-01, 4.5465e-01, 3.5049e-01, 2.8592e-01, - 4.6064e-01, 1.3733e-01, 3.9878e-01, 8.2656e-01, - 4.3802e-01, 6.0861e-01, 2.3988e-01, 9.3018e-01, - 6.1398e-01, 8.8443e-01, 5.6659e-01, 9.7942e-01, - 5.9063e-03, 3.6857e-01, 7.0689e-01, 6.8562e-01, - 6.2652e-01, 7.1136e-01, 5.8443e-01, 2.8104e-01, - 1.1975e-01, 3.0807e-01, 8.5351e-01, 6.1232e-01, - 1.2850e-01, 6.4265e-01, 7.5862e-02, 8.1888e-01, - 1.5209e-01, 7.4651e-01, 8.9132e-01, 4.7729e-01, - 1.0458e-01, 5.8526e-03, 7.3018e-01, 3.9002e-01, - 8.3555e-01, 9.9792e-01, 1.2007e-01, 1.8173e-01, - 8.8202e-01, 6.3435e-02, 4.7842e-01, 4.3553e-01, - 2.6080e-01, 9.8947e-02, 1.9814e-01, 4.1648e-01, - 7.5812e-01, 5.7984e-01, 9.1152e-01, 8.4139e-01, - 7.3750e-02, 5.3616e-01, 2.6399e-02, 9.7508e-01, - 5.2809e-01, 9.0090e-01, 5.0747e-01, 9.1801e-01, - 2.8317e-01, 2.6351e-01, 7.4357e-01, 9.1706e-01, - 1.2509e-01, 7.5712e-01, 6.6426e-01, 3.7521e-01, - 7.4759e-01, 2.2567e-01, 6.8380e-01, 6.5098e-01, - 4.2407e-01, 9.0580e-01, 7.6534e-01, 6.2130e-01, - 7.4281e-01, 4.2870e-01, 9.5017e-01, 3.7626e-01, - 1.1094e-01, 9.0190e-01, 9.6787e-01, 4.0010e-01, - 4.7100e-01, 1.0633e-01, 8.9043e-01, 5.0640e-01, - 7.5304e-01, 3.1368e-03, 2.9130e-01, 9.0727e-01, - 8.4157e-01, 6.3215e-01, 7.1909e-01, 6.2791e-01, - 6.7473e-01, 5.2095e-01, 1.9918e-01, 4.2582e-01, - 7.0689e-01, 1.4951e-01, 9.5523e-01, 6.8796e-01, - 5.8195e-01, 7.1391e-01, 1.2233e-01, 8.4813e-02, - 8.0007e-01, 2.3040e-01, 5.2230e-01, 6.0031e-01, - 8.8717e-01, 7.2196e-01, 3.1973e-01, 9.8354e-01, - 8.2522e-01, 5.4297e-01, 8.2448e-01, 4.4039e-01, - 7.9127e-01, 1.5486e-01, 8.9541e-02, 7.5881e-01, - 7.0277e-01, 9.5355e-01, 5.7998e-01, 1.0220e-01, - 9.7185e-01, 1.5354e-01, 4.2886e-01, 9.4374e-01, - 9.6972e-01, 4.2716e-01, 7.7967e-01, 4.4084e-01, - 5.9503e-02, 2.8447e-01, 6.6930e-01, 9.2433e-01, - 8.5201e-02, 9.4074e-02, 5.7816e-02, 5.2477e-01, - 4.0431e-01, 9.3680e-01, 7.9281e-01, 5.0214e-01, - 4.3363e-01, 5.0481e-01, 6.5806e-01, 1.2108e-02, - 5.4159e-01, 5.7991e-01, 9.5810e-01, 4.5809e-01, - 5.2811e-01, 4.7398e-01, 6.0017e-01, 6.2723e-01, - 3.1668e-01, 6.6957e-01, 7.3447e-01, 9.3801e-01, - 6.3741e-01, 9.5748e-01, 6.1705e-01, 6.1947e-01, - 7.1114e-01, 7.8047e-01, 6.3606e-01, 1.1714e-01, - 2.9771e-01, 5.7763e-01, 7.2725e-01, 3.9279e-02, - 2.2740e-01, 1.1983e-01, 9.7496e-01, 9.7107e-01, - 6.8192e-01, 4.7055e-01, 7.6591e-02, 1.8872e-01, - 7.6675e-01, 8.9331e-01, 7.3735e-01, 2.2111e-01, - 3.9938e-01, 9.5547e-01, 1.5711e-01, 4.1021e-01, - 1.1412e-01, 4.7451e-01, 9.3830e-01, 2.5704e-02, - 3.1434e-01, 7.9403e-01, 7.1105e-01, 2.2231e-01, - 8.6172e-01, 7.5915e-01, 5.5640e-01, 3.9548e-01, - 7.4875e-01, 7.2545e-03, 6.9761e-02, 3.3954e-01, - 3.0983e-02, 
7.8257e-01, 4.6817e-01, 1.4299e-01, - 7.3917e-01, 5.7775e-01, 7.0050e-01, 1.0170e-01, - 5.9268e-02, 7.8402e-01, 1.1116e-01, 7.0050e-01, - 6.8218e-01, 5.1559e-01, 9.0164e-01, 6.3522e-01, - 5.1084e-01, 4.0179e-01, 2.3160e-01, 8.9595e-01, - 5.3957e-01, 2.9593e-01, 8.6295e-02, 4.3092e-01, - 1.8133e-01, 7.9398e-01, 3.1231e-01, 4.6884e-01, - 9.3005e-01, 2.3156e-01, 4.6512e-01, 2.4386e-01, - 4.0889e-01, 2.1786e-01, 4.0123e-01, 6.9927e-02, - 1.2919e-01, 9.3419e-01, 7.5543e-01, 2.8780e-01, - 9.9350e-01, 4.7547e-01, 1.5017e-01, 3.4936e-01, - 7.4752e-01, 7.8208e-02, 8.9649e-02, 6.0120e-01, - 4.8680e-03, 3.9810e-01, 1.9291e-02, 2.5980e-01, - 6.4314e-01, 1.1897e-01, 1.0738e-01, 2.5415e-01, - 6.2778e-01, 3.3058e-01, 9.6010e-01, 5.7716e-01, - 5.7400e-01, 6.5654e-01, 3.8685e-01, 1.4531e-01, - 3.6711e-01, 5.5517e-01, 5.4930e-01, 1.8321e-01, - 4.6848e-01, 4.2975e-01, 8.5091e-01, 6.9266e-01, - 8.8622e-01, 5.1231e-01, 4.8174e-01, 4.5498e-02, - 7.8463e-01, 9.5652e-01, 3.0625e-01, 4.4074e-01, - 1.6669e-01, 8.4050e-01, 3.7991e-01, 6.5276e-01]), + col_indices=tensor([1138, 3141, 5372, 6298, 9895, 5592, 6656, 4614, 931, + 6509, 2541, 488, 1171, 1072, 9057, 8648, 305, 9468, + 8935, 3721, 8788, 4223, 1394, 7441, 8183, 7526, 7164, + 9501, 2074, 6095, 6430, 1576, 7765, 4984, 8210, 5345, + 6644, 4874, 9665, 9793, 4608, 6072, 7262, 5461, 8184, + 6119, 899, 3855, 5088, 3002, 502, 2723, 2838, 2671, + 245, 5685, 2372, 8774, 3148, 7424, 9384, 3212, 8505, + 9938, 1175, 4045, 4800, 98, 907, 4698, 1099, 3556, + 6117, 539, 3430, 5205, 6742, 549, 1013, 7399, 5538, + 6070, 13, 7425, 1069, 3892, 5623, 622, 3112, 6779, + 5841, 5246, 7130, 3748, 8292, 4888, 3930, 4486, 404, + 1247, 8728, 8238, 569, 8783, 9166, 5690, 2454, 272, + 8698, 4860, 6880, 3565, 3134, 6354, 865, 434, 9144, + 921, 4245, 143, 7627, 7460, 9895, 5538, 9555, 1920, + 9046, 6039, 3817, 9183, 833, 9223, 1343, 7435, 74, + 2910, 7431, 4730, 9568, 7053, 553, 6586, 3902, 2102, + 4819, 4670, 639, 3073, 9896, 2444, 4693, 9856, 1357, + 8765, 9020, 4841, 7320, 6909, 6799, 6398, 5965, 3749, + 7728, 1534, 7555, 9916, 8346, 6649, 7695, 1555, 9453, + 3570, 2292, 7726, 9222, 1870, 8326, 5496, 9558, 9331, + 1953, 8958, 9671, 4151, 4367, 6771, 6034, 9364, 5920, + 1930, 4407, 1393, 7699, 5074, 9853, 2420, 4723, 1193, + 3555, 5221, 6242, 9462, 9524, 8904, 4357, 599, 5063, + 9306, 1035, 2970, 3581, 438, 1012, 1578, 6514, 1246, + 280, 9934, 9599, 1347, 7451, 9057, 9941, 2507, 6869, + 4825, 8824, 6274, 4478, 1408, 6086, 4878, 2420, 9493, + 8982, 2379, 609, 5624, 3583, 2173, 3341, 2149, 6472, + 8232, 4799, 2443, 9805, 3726, 1960, 2222, 4203, 9706, + 6042, 2127, 9512, 2848, 5472, 464, 7833, 9395, 4397, + 1252, 536, 150, 1099, 2496, 571, 7941, 6943, 7304, + 7511, 2241, 5493, 562, 6009, 2047, 7239, 7266, 8450, + 3771, 5993, 8725, 8763, 3254, 7903, 1883, 306, 172, + 4496, 8646, 114, 4663, 5065, 5972, 5609, 5112, 7979, + 2267, 3998, 628, 3262, 4723, 5522, 333, 771, 7789, + 7411, 6161, 939, 510, 2157, 7502, 5114, 7332, 1071, + 911, 7141, 1103, 2384, 9370, 6550, 3084, 7659, 5517, + 5645, 8343, 428, 5819, 2990, 2152, 8255, 9437, 8453, + 6828, 9261, 236, 1646, 2971, 9539, 3535, 2506, 2597, + 2970, 9785, 1861, 7103, 1286, 1000, 3469, 315, 3221, + 8558, 2959, 2957, 7965, 5562, 4795, 8592, 264, 7999, + 8407, 7957, 1456, 1284, 5753, 340, 9594, 4789, 2049, + 2443, 8386, 9750, 2724, 1350, 7113, 7702, 9893, 426, + 7946, 8480, 6869, 2951, 6923, 3651, 4947, 76, 246, + 3064, 9432, 8890, 9830, 3454, 5349, 2099, 4006, 3982, + 1400, 6701, 8208, 6231, 6572, 7108, 169, 3663, 1024, + 3861, 1643, 3067, 7537, 
5626, 9156, 2530, 1256, 9586, + 1939, 1349, 9610, 3932, 8683, 6507, 692, 9567, 4786, + 7294, 9227, 5955, 3765, 95, 7099, 9975, 1773, 2563, + 8275, 417, 4896, 2261, 6841, 8549, 5658, 7531, 1862, + 8554, 7945, 3708, 1599, 7281, 6945, 5282, 5274, 8382, + 9544, 6608, 5018, 9387, 8311, 4840, 5052, 5898, 3273, + 6811, 9335, 9665, 7323, 4598, 206, 8472, 1143, 5942, + 5752, 1091, 8080, 5491, 4152, 6519, 1287, 5968, 347, + 8460, 9287, 6222, 8593, 6122, 9463, 5973, 9569, 9864, + 8581, 7157, 1274, 7748, 9974, 703, 4340, 3457, 5165, + 636, 8506, 2388, 4697, 6336, 2157, 965, 2983, 4117, + 5342, 5035, 7690, 8511, 5702, 1730, 435, 2888, 3925, + 5632, 1145, 8990, 6840, 2995, 3673, 4357, 1488, 6718, + 3409, 5307, 9890, 3580, 194, 415, 75, 1173, 582, + 8512, 2152, 3651, 1736, 4406, 9709, 5688, 2037, 9202, + 616, 4398, 9978, 961, 4749, 6199, 2766, 420, 5891, + 2035, 4203, 6034, 9532, 4073, 3543, 9399, 5243, 1736, + 5205, 3639, 7636, 7945, 1076, 8052, 8786, 7516, 1783, + 592, 9245, 3631, 8018, 7142, 4227, 1771, 8550, 3919, + 40, 9025, 7922, 8752, 4829, 6233, 6992, 2147, 8671, + 1125, 9695, 1968, 1143, 899, 7218, 5368, 7349, 8299, + 4130, 2489, 1943, 8450, 8922, 2692, 6194, 2109, 3488, + 648, 128, 2619, 8466, 3510, 8010, 2563, 4452, 2449, + 3117, 3222, 8819, 6314, 2193, 4531, 9312, 6323, 8594, + 3258, 9109, 1332, 8418, 7949, 2357, 3619, 7354, 3313, + 61, 518, 2284, 2088, 1386, 2415, 3436, 7167, 5622, + 8607, 8065, 8996, 6833, 8899, 6018, 2523, 5619, 500, + 9741, 447, 3804, 906, 3011, 8029, 494, 31, 1574, + 8607, 3528, 2944, 1180, 3791, 1961, 4136, 1334, 7857, + 6620, 8883, 2382, 315, 5796, 2019, 5530, 8196, 2757, + 6981, 3617, 4634, 9758, 8925, 1556, 8556, 6115, 2436, + 456, 4547, 7030, 3590, 4197, 848, 7813, 2702, 1882, + 1803, 4494, 3216, 104, 2415, 973, 9057, 8136, 2617, + 9231, 6621, 2270, 5728, 4475, 2926, 1275, 5702, 6247, + 6804, 4756, 6356, 6362, 2856, 4487, 6258, 4333, 7608, + 5197, 8900, 2423, 8906, 4613, 4914, 7823, 7748, 1843, + 5989, 1493, 7321, 237, 2861, 4827, 700, 4808, 5985, + 9382, 3661, 2628, 1573, 4286, 4564, 6813, 6649, 2611, + 1727, 5253, 3700, 4473, 5116, 4990, 8314, 4529, 7271, + 3677, 1911, 1114, 6546, 6706, 5817, 4555, 8804, 7540, + 3808, 5203, 8535, 8025, 6109, 5300, 5651, 2490, 6109, + 6132, 3901, 8640, 7135, 8793, 5811, 9495, 4735, 1699, + 1439, 220, 2921, 4550, 4963, 9167, 1303, 244, 3665, + 7823, 7172, 8799, 8427, 6074, 1240, 3154, 9837, 5939, + 5156, 4695, 6283, 3037, 9601, 9741, 7245, 2434, 1037, + 8802, 6544, 3585, 8380, 1858, 9428, 7153, 8077, 6766, + 8197, 9547, 1973, 8258, 6508, 713, 5126, 1339, 2979, + 1166, 8047, 6847, 97, 8865, 3971, 3769, 8192, 8150, + 4789, 7871, 1531, 5674, 1575, 9808, 2800, 6837, 1264, + 6679, 5873, 3502, 1872, 8788, 1902, 7193, 7418, 5329, + 8259, 9419, 7793, 5540, 71, 6690, 4936, 794, 6462, + 2182, 6189, 1966, 9133, 7350, 871, 4346, 699, 3324, + 5869, 158, 5971, 2372, 694, 8303, 12, 2318, 1223, + 7659, 4487, 8019, 6915, 6927, 6928, 4811, 355, 3455, + 6747, 8341, 6371, 2538, 3410, 239, 3646, 5943, 6770, + 1495, 9724, 2133, 6260, 7065, 1361, 359, 4604, 3980, + 579, 6628, 3682, 1853, 6084, 9735, 8312, 1840, 4876, + 8912, 6208, 6859, 9839, 6928, 2236, 4449, 4135, 4776, + 6464, 2481, 4925, 9412, 3456, 2660, 9544, 6110, 6742, + 9061, 610, 5758, 1002, 4547, 8135, 324, 2486, 1852, + 8322, 5074, 4466, 826, 7401, 1853, 6438, 2899, 5836, + 427, 7933, 2044, 8055, 3683, 9649, 6408, 5387, 4196, + 3681, 7448, 3637, 8192, 6445, 6771, 10, 729, 9265, + 8381, 8151, 4992, 7160, 9680, 7981, 8403, 1251, 8075, + 8608, 9794, 3487, 7890, 6077, 7271, 18, 389, 9327, + 834, 
2577, 4035, 1431, 7927, 9393, 643, 1226, 3816, + 5933]), + values=tensor([1.9375e-01, 3.5125e-01, 2.0768e-01, 2.6967e-01, + 6.5553e-01, 5.3217e-01, 3.3343e-01, 6.1229e-01, + 8.1898e-01, 5.0937e-01, 7.4774e-01, 5.1207e-01, + 8.0001e-01, 3.5041e-01, 6.7059e-01, 3.7672e-01, + 9.3472e-01, 9.3036e-01, 2.3990e-01, 8.7663e-01, + 4.6996e-01, 5.0217e-02, 8.4756e-01, 7.8975e-01, + 1.1314e-01, 4.4290e-01, 2.0680e-01, 9.2084e-02, + 3.5472e-01, 9.4380e-01, 7.9234e-01, 8.1876e-01, + 2.3511e-01, 7.6869e-01, 4.2133e-02, 1.0330e-01, + 1.7268e-01, 7.6007e-01, 3.8309e-03, 9.1460e-01, + 7.3496e-01, 4.9658e-01, 2.1053e-01, 4.7699e-01, + 5.0002e-01, 8.1406e-01, 5.4606e-01, 4.2159e-01, + 2.6297e-01, 2.4520e-01, 9.6103e-01, 7.7868e-01, + 7.6457e-01, 6.2481e-01, 4.5144e-01, 1.9487e-01, + 3.0538e-01, 3.2455e-02, 6.4771e-01, 8.8797e-01, + 9.3051e-01, 9.7039e-01, 9.7735e-01, 3.0647e-01, + 9.6008e-02, 6.3088e-01, 8.0975e-01, 5.3515e-01, + 8.0786e-01, 2.9740e-01, 2.4833e-01, 2.6441e-01, + 3.2550e-01, 2.1987e-01, 1.3930e-02, 1.7247e-01, + 6.8496e-01, 8.3630e-01, 7.5016e-01, 8.7730e-01, + 2.1744e-02, 6.2953e-01, 2.2457e-01, 2.0815e-01, + 1.6450e-01, 7.3002e-01, 4.9950e-01, 3.0029e-02, + 7.4701e-02, 6.2437e-01, 7.7434e-01, 3.8994e-01, + 1.0852e-01, 7.4616e-01, 4.7554e-01, 7.9127e-01, + 4.2656e-01, 7.7188e-01, 3.6412e-01, 2.5388e-01, + 9.3166e-01, 4.7512e-01, 3.1345e-01, 9.4634e-01, + 4.9501e-01, 2.5802e-01, 2.9731e-01, 4.5345e-01, + 4.5427e-01, 2.7223e-01, 3.0850e-01, 5.9089e-01, + 2.3642e-01, 7.7402e-01, 9.5831e-01, 4.5183e-01, + 4.4635e-01, 3.4103e-01, 1.9752e-01, 4.1907e-01, + 9.4180e-01, 8.3566e-01, 7.5022e-01, 3.4154e-01, + 8.8228e-01, 7.0899e-02, 1.2527e-01, 7.8104e-01, + 6.4557e-01, 3.4866e-01, 2.6732e-01, 6.2958e-01, + 1.1325e-01, 7.9593e-01, 6.9074e-01, 4.4629e-01, + 2.5885e-01, 7.2330e-01, 8.2253e-01, 9.8244e-01, + 7.9844e-01, 4.3702e-01, 9.6241e-01, 7.5657e-01, + 5.3837e-01, 1.8624e-01, 3.8968e-01, 3.4892e-01, + 7.0278e-01, 5.5261e-01, 4.0352e-01, 3.3248e-02, + 2.7925e-01, 1.5770e-01, 4.3930e-01, 1.5049e-01, + 4.8475e-01, 3.8261e-01, 5.0227e-01, 6.2634e-01, + 4.8135e-01, 9.1814e-01, 4.0382e-02, 9.3517e-01, + 5.1252e-01, 3.4347e-01, 6.1408e-01, 4.7550e-01, + 8.2509e-03, 7.1485e-01, 4.8130e-01, 1.2004e-01, + 3.3123e-01, 3.7004e-01, 4.8796e-01, 6.7218e-01, + 5.1642e-01, 2.9873e-01, 6.9024e-01, 1.8738e-01, + 6.7151e-01, 1.0462e-01, 2.4549e-01, 6.9732e-01, + 4.1806e-01, 4.7981e-02, 9.3086e-01, 4.1645e-02, + 3.4142e-01, 4.8380e-01, 4.8247e-01, 8.9969e-01, + 8.6323e-01, 9.7268e-01, 5.6983e-01, 1.3965e-02, + 5.9431e-01, 2.9727e-01, 1.6227e-02, 9.8379e-01, + 7.9038e-01, 8.8241e-01, 5.7714e-01, 3.3784e-02, + 1.0960e-01, 3.1070e-01, 5.3521e-01, 7.0199e-01, + 6.5999e-01, 2.7917e-01, 9.8173e-02, 6.2497e-01, + 9.5182e-01, 6.6789e-02, 1.5393e-02, 6.2254e-01, + 1.0971e-01, 8.8447e-01, 5.4323e-01, 2.1015e-01, + 6.6007e-01, 8.2753e-01, 3.6703e-01, 7.4051e-01, + 6.6966e-01, 9.7913e-01, 4.0712e-01, 3.4707e-01, + 3.8309e-01, 2.3070e-01, 2.2715e-01, 7.7305e-01, + 3.7610e-01, 3.4003e-01, 5.8650e-01, 5.8454e-01, + 3.8920e-01, 2.4417e-01, 2.4715e-02, 1.6482e-01, + 6.8219e-01, 4.0944e-01, 1.2251e-01, 8.3378e-01, + 2.8858e-01, 4.5315e-01, 3.7387e-01, 5.1960e-01, + 6.3346e-01, 5.5499e-02, 4.5719e-01, 8.2993e-01, + 8.1787e-01, 3.3558e-01, 2.6451e-01, 5.5224e-01, + 3.1647e-01, 7.8278e-01, 2.1696e-01, 6.5586e-01, + 2.1644e-01, 9.5937e-01, 7.6861e-01, 8.0010e-01, + 6.6128e-01, 1.2187e-01, 9.4748e-01, 9.0035e-01, + 7.1037e-01, 1.8546e-01, 7.1352e-01, 1.8524e-01, + 7.4925e-01, 2.6708e-01, 7.6244e-01, 4.1247e-01, + 1.4128e-01, 4.9211e-01, 
3.7997e-01, 7.0780e-01, + 3.8386e-02, 6.6816e-01, 6.0148e-01, 9.0383e-01, + 7.2595e-01, 6.8359e-01, 2.9584e-01, 8.2414e-02, + 1.5712e-01, 4.9752e-01, 1.1561e-01, 2.6694e-01, + 7.7841e-01, 1.6152e-01, 9.8172e-01, 1.9818e-01, + 1.4269e-01, 8.9879e-02, 5.0250e-01, 5.7802e-01, + 6.6528e-01, 3.6557e-02, 1.2972e-01, 6.6852e-01, + 4.9575e-01, 8.1221e-01, 6.6098e-01, 8.1005e-02, + 3.3205e-01, 2.1959e-01, 7.2778e-01, 9.9336e-01, + 6.7973e-02, 8.8482e-01, 2.5671e-01, 8.5151e-01, + 1.4303e-01, 9.4333e-01, 1.3749e-01, 3.5676e-01, + 9.2539e-01, 3.5622e-01, 8.1502e-01, 9.8078e-01, + 3.7427e-01, 5.4771e-01, 4.4970e-01, 3.5472e-01, + 6.8737e-01, 8.9441e-01, 4.3924e-01, 9.0130e-01, + 7.3301e-01, 2.3730e-01, 1.4503e-01, 1.6294e-01, + 7.4797e-01, 3.0663e-01, 9.1635e-01, 2.2485e-01, + 8.1593e-01, 6.9641e-01, 5.8876e-01, 2.5397e-01, + 2.7194e-01, 2.3151e-01, 4.6512e-01, 3.3600e-01, + 1.2084e-01, 7.4145e-01, 8.1178e-01, 4.2986e-01, + 3.2717e-01, 8.9582e-01, 3.8058e-03, 1.1514e-01, + 2.6151e-01, 8.6861e-01, 3.9034e-01, 3.7204e-01, + 5.0550e-01, 2.5962e-01, 7.9080e-01, 7.3126e-01, + 1.9304e-01, 2.7548e-01, 4.0413e-01, 6.4024e-01, + 3.0537e-01, 6.4800e-02, 2.9508e-01, 4.4554e-01, + 7.4376e-01, 3.5356e-01, 1.2386e-01, 9.0646e-01, + 4.4257e-01, 9.9428e-01, 3.9815e-01, 1.0579e-01, + 5.3899e-01, 8.8053e-01, 4.1755e-01, 2.6286e-01, + 8.6265e-01, 3.5440e-01, 4.1767e-01, 4.3722e-01, + 2.2701e-01, 2.0119e-01, 2.0168e-01, 7.4481e-01, + 3.1458e-01, 9.1105e-01, 5.1549e-01, 4.9528e-01, + 7.2489e-01, 6.9358e-01, 9.4876e-01, 1.4933e-01, + 1.0757e-01, 7.4461e-01, 3.5457e-01, 2.9405e-01, + 3.7800e-01, 3.5033e-01, 2.4629e-01, 4.0123e-01, + 8.0207e-01, 3.0032e-01, 1.9983e-01, 3.7397e-01, + 1.0202e-01, 9.9367e-01, 7.3515e-01, 8.2278e-01, + 1.1699e-01, 7.4263e-02, 4.8947e-01, 7.3849e-01, + 1.1436e-01, 4.7523e-01, 7.7099e-01, 7.2400e-01, + 6.5547e-02, 5.7163e-01, 3.2211e-01, 6.7657e-01, + 5.1329e-02, 1.2576e-01, 8.5341e-01, 1.3857e-01, + 6.3257e-01, 4.3927e-01, 4.0713e-01, 7.1751e-01, + 8.1961e-01, 5.3252e-01, 3.3088e-01, 5.8053e-01, + 1.4918e-01, 8.0005e-01, 3.7335e-01, 9.6447e-02, + 1.7216e-01, 1.0099e-01, 1.5519e-01, 3.6774e-01, + 8.7242e-01, 4.4847e-01, 5.5891e-01, 3.0216e-01, + 8.5587e-01, 3.0503e-01, 4.5505e-01, 7.7470e-01, + 7.5445e-01, 7.0880e-01, 4.5444e-01, 9.8342e-01, + 9.9777e-01, 7.5503e-01, 9.8966e-01, 4.0026e-01, + 6.9843e-01, 1.9249e-03, 7.8839e-01, 1.3053e-01, + 1.3274e-01, 8.4923e-02, 4.2804e-01, 1.5293e-01, + 9.0170e-01, 4.9992e-01, 7.7254e-01, 7.4648e-01, + 6.2388e-01, 8.5497e-01, 8.2756e-01, 3.8686e-01, + 9.8151e-02, 5.4863e-01, 2.5265e-01, 7.3707e-01, + 2.0151e-01, 5.7024e-01, 2.8365e-01, 3.5556e-01, + 2.8799e-01, 1.3107e-02, 5.3341e-01, 3.5237e-01, + 8.8548e-01, 5.9289e-01, 5.4342e-01, 1.7633e-01, + 6.3070e-01, 5.3768e-01, 2.2342e-01, 3.3247e-01, + 6.9090e-01, 4.1567e-01, 1.1222e-01, 4.7518e-02, + 9.4288e-01, 9.5518e-01, 7.6795e-01, 5.0269e-01, + 3.6134e-01, 6.0356e-01, 8.3473e-01, 9.6104e-01, + 6.3423e-01, 1.6210e-02, 4.0034e-01, 9.7914e-01, + 3.9605e-02, 1.7596e-01, 5.7872e-01, 6.7935e-01, + 4.1868e-01, 8.3560e-01, 4.0556e-01, 4.8967e-02, + 2.2949e-01, 1.0615e-01, 5.7564e-01, 7.4653e-02, + 9.0806e-01, 6.0640e-01, 4.3840e-01, 7.2256e-01, + 2.0422e-01, 4.2972e-01, 5.8503e-01, 7.7506e-01, + 9.7508e-01, 9.4505e-01, 6.4491e-01, 2.1982e-01, + 2.7546e-01, 7.7442e-01, 5.0277e-01, 6.9720e-02, + 7.4204e-01, 6.0831e-01, 1.1658e-01, 5.0890e-01, + 1.1812e-01, 6.0273e-01, 8.2931e-01, 9.8180e-01, + 1.5799e-01, 9.8569e-01, 4.0228e-01, 4.0955e-01, + 4.7497e-02, 8.9661e-02, 4.9936e-01, 1.1148e-01, + 9.0756e-01, 4.0268e-01, 
8.9360e-01, 2.2851e-01, + 2.0956e-01, 1.7882e-01, 2.9301e-01, 8.3386e-01, + 6.4286e-01, 9.9061e-01, 5.6455e-02, 7.0386e-01, + 1.2706e-01, 8.6843e-01, 1.9036e-01, 7.5009e-01, + 9.0547e-01, 3.5391e-02, 3.8624e-01, 1.1931e-01, + 9.4649e-01, 6.8202e-01, 9.5564e-01, 1.1891e-01, + 8.1752e-01, 4.3762e-01, 5.5711e-01, 2.5729e-01, + 3.5776e-01, 4.0330e-01, 2.2345e-01, 8.0837e-01, + 9.1913e-01, 5.1379e-01, 1.0360e-01, 8.8191e-01, + 6.7996e-01, 9.4555e-02, 6.2440e-01, 2.8075e-01, + 4.6693e-01, 5.4747e-01, 4.7497e-01, 8.5070e-02, + 5.6932e-02, 6.4648e-01, 1.2477e-03, 2.0721e-01, + 6.8915e-01, 8.7342e-01, 5.7258e-01, 8.6224e-01, + 5.7939e-01, 9.9515e-01, 2.0583e-01, 2.1518e-01, + 8.9509e-01, 8.0367e-01, 6.4277e-01, 5.0827e-01, + 2.5676e-02, 5.8560e-02, 4.3004e-01, 4.5788e-01, + 6.6471e-01, 3.9649e-01, 2.0012e-01, 4.3487e-01, + 9.0304e-01, 2.2243e-01, 3.0464e-01, 8.5268e-01, + 5.6222e-01, 2.4369e-01, 8.6137e-01, 7.8472e-02, + 9.1993e-02, 6.5703e-01, 2.8403e-01, 8.8040e-01, + 2.8392e-01, 7.7439e-01, 9.3284e-01, 2.0412e-01, + 4.0388e-01, 8.1648e-01, 2.1973e-01, 5.7573e-01, + 9.2892e-02, 7.5088e-01, 3.0786e-01, 1.8024e-01, + 6.5532e-01, 6.1457e-02, 2.1697e-01, 1.0836e-01, + 3.9118e-01, 4.9190e-01, 9.4466e-01, 2.1653e-01, + 2.9599e-01, 7.5911e-01, 4.5813e-03, 2.5706e-01, + 2.5307e-01, 2.9388e-01, 5.5978e-01, 8.8650e-01, + 4.8734e-01, 4.7580e-01, 9.1478e-01, 1.2022e-01, + 8.6153e-01, 4.8555e-01, 8.7075e-01, 8.6253e-01, + 1.4431e-01, 3.4625e-01, 2.9820e-01, 3.7020e-01, + 7.2331e-01, 5.3627e-01, 3.3997e-01, 4.2166e-01, + 3.2381e-01, 3.9761e-01, 7.5745e-01, 6.5813e-01, + 6.4982e-01, 2.8336e-01, 5.7049e-01, 7.2874e-01, + 4.0575e-01, 8.7412e-02, 8.6145e-01, 4.1196e-01, + 8.0483e-01, 1.8391e-01, 5.7371e-02, 8.4034e-01, + 5.2513e-01, 6.0727e-01, 1.5458e-01, 2.0643e-01, + 5.1326e-01, 6.6117e-01, 9.5947e-01, 7.8681e-01, + 6.7698e-01, 9.6081e-01, 4.5145e-01, 6.0458e-01, + 5.2329e-01, 4.8816e-01, 6.4831e-01, 5.4729e-01, + 5.0501e-01, 3.9862e-03, 4.3458e-01, 8.2356e-02, + 4.4194e-01, 4.6046e-01, 8.6119e-01, 7.2362e-01, + 3.3762e-01, 5.7168e-01, 3.7204e-01, 4.6616e-01, + 2.8453e-01, 8.4184e-01, 9.1507e-01, 7.3000e-01, + 2.3497e-01, 4.7039e-01, 3.0436e-01, 7.3392e-01, + 2.0230e-01, 7.5799e-01, 8.0132e-01, 2.6207e-01, + 4.1549e-01, 4.5825e-01, 3.5712e-04, 8.6380e-01, + 2.5643e-01, 1.4106e-01, 4.6769e-01, 1.6319e-01, + 4.2617e-01, 6.8278e-01, 9.9952e-01, 7.2726e-01, + 5.3935e-01, 7.4477e-01, 6.4322e-01, 3.4132e-01, + 9.1885e-01, 8.9713e-01, 7.1047e-01, 6.7622e-01, + 5.4440e-01, 6.6700e-01, 5.7860e-01, 2.7002e-02, + 4.8064e-01, 4.6679e-01, 3.5471e-01, 7.7442e-01, + 1.8786e-01, 7.3381e-01, 5.6379e-01, 1.0770e-01, + 3.8089e-01, 4.8584e-01, 9.5384e-01, 8.6167e-01, + 2.0388e-02, 5.9740e-01, 6.5420e-01, 9.4518e-01, + 6.0007e-01, 9.4668e-01, 2.4456e-01, 1.9964e-01, + 7.2138e-01, 2.1021e-01, 4.1505e-02, 7.3214e-01, + 5.7585e-01, 2.6983e-01, 2.1348e-01, 2.7068e-01, + 6.3100e-02, 4.9250e-01, 2.3708e-01, 6.1793e-01, + 1.5893e-01, 5.2076e-01, 6.7793e-01, 7.4495e-01, + 4.5389e-02, 3.7724e-02, 2.2370e-01, 3.9055e-01, + 1.2615e-01, 3.4002e-01, 3.5834e-01, 7.8782e-02, + 4.9173e-01, 4.1157e-01, 6.6417e-01, 6.4484e-01, + 5.0083e-01, 4.2959e-01, 9.8685e-01, 3.5260e-01, + 4.2792e-02, 1.3207e-01, 9.1311e-01, 9.6641e-01, + 8.9353e-01, 7.3994e-01, 9.4765e-01, 6.5472e-01, + 1.5391e-01, 8.8154e-01, 6.2777e-01, 1.8012e-02, + 7.1259e-01, 6.7197e-01, 7.7261e-01, 4.6110e-01, + 6.2714e-01, 9.6933e-01, 6.1243e-01, 6.0628e-01, + 7.6382e-01, 9.6854e-01, 5.1705e-01, 8.5856e-01, + 3.1705e-01, 7.2350e-01, 9.4389e-02, 2.9574e-01, + 9.1463e-01, 2.0940e-01, 
1.7609e-01, 7.3978e-01,
+ 9.2575e-01, 6.8811e-01, 1.6098e-01, 6.4482e-02,
+ 2.5061e-02, 5.2998e-01, 9.8449e-01, 5.4429e-01,
+ 6.8862e-01, 9.4297e-01, 7.7867e-01, 1.7015e-01,
+ 7.0622e-01, 3.6348e-01, 3.8057e-01, 7.0215e-01,
+ 8.9386e-01, 7.3907e-03, 6.8824e-01, 7.3556e-01,
+ 2.5358e-01, 6.8417e-01, 3.7956e-01, 4.3455e-01,
+ 3.0873e-01, 4.7848e-01, 8.7651e-01, 5.9179e-01,
+ 2.4253e-01, 9.6398e-01, 4.9589e-01, 9.5203e-01,
+ 9.5415e-01, 8.6776e-01, 1.1685e-01, 3.3088e-01,
+ 7.7341e-01, 8.3175e-01, 5.7462e-01, 1.3990e-01,
+ 9.0461e-01, 6.0410e-01, 8.4851e-01, 9.6942e-01,
+ 8.4864e-01, 6.3279e-01, 9.6144e-01, 1.5080e-01,
+ 8.9336e-02, 9.6933e-01, 4.5647e-01, 7.3363e-01,
+ 3.9520e-01, 5.6769e-01, 1.2489e-01, 5.1997e-01,
+ 1.6970e-01, 5.2122e-02, 4.9514e-01, 5.7753e-01,
+ 3.1179e-01, 8.2135e-01, 3.0969e-01, 1.9110e-01,
+ 9.3857e-02, 5.3521e-01, 3.5248e-01, 6.2581e-01,
+ 9.7808e-01, 5.1285e-01, 9.7337e-01, 2.5133e-01,
+ 4.4027e-01, 4.3065e-01, 2.5723e-01, 1.2854e-01,
+ 9.8939e-02, 9.0984e-01, 8.7231e-01, 9.3467e-01,
+ 7.2945e-01, 3.0576e-01, 1.3236e-01, 7.1361e-02,
+ 3.9339e-01, 3.1714e-01, 3.2872e-01, 5.1748e-01,
+ 5.5217e-01, 4.1788e-01, 7.8429e-01, 6.7386e-02,
+ 7.7600e-01, 4.0606e-01, 6.8449e-01, 5.7668e-02,
+ 9.0049e-01, 8.6218e-01, 3.3053e-01, 7.6311e-01,
+ 5.8454e-01, 1.8191e-01, 9.8940e-01, 1.1427e-02,
+ 6.7147e-01, 3.5037e-01, 8.0766e-01, 9.2500e-01,
+ 1.0255e-01, 9.5627e-01, 4.2546e-02, 1.7540e-01,
+ 5.4745e-01, 5.9252e-01, 1.4245e-01, 4.0475e-01,
+ 9.8581e-01, 3.8861e-01, 8.0536e-01, 6.9424e-01,
+ 6.3616e-01, 7.9450e-01, 3.0102e-01, 4.6604e-01,
+ 4.0082e-01, 7.9423e-01, 6.0621e-02, 7.6039e-01,
+ 2.8130e-01, 7.6283e-01, 8.3019e-01, 4.7440e-01,
+ 9.3373e-01, 2.3127e-01, 9.8599e-01, 1.0451e-01,
+ 4.4318e-01, 4.3340e-01, 1.2718e-01, 6.7560e-01,
+ 8.0438e-01, 2.4075e-01, 5.0321e-01, 2.8248e-01,
+ 6.0269e-01, 1.4597e-01, 1.3511e-01, 1.7491e-01,
+ 8.6251e-01, 4.5483e-01, 7.5964e-01, 2.8131e-01]),
 size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.3721, 0.5043, 0.5568, ..., 0.8647, 0.9880, 0.8941])
+tensor([0.4226, 0.0556, 0.1398, ..., 0.5751, 0.9814, 0.4838])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -1402,13 +1402,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 10.363084554672241 seconds
+Time: 10.681929588317871 seconds
 
-[39.17, 38.6, 39.23, 38.35, 39.37, 38.46, 39.25, 43.56, 39.01, 38.43]
-[94.69]
-12.925498723983765
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 352057, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.363084554672241, 'TIME_S_1KI': 0.029435814526262056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1223.9154741740226, 'W': 94.69}
-[39.17, 38.6, 39.23, 38.35, 39.37, 38.46, 39.25, 43.56, 39.01, 38.43, 39.13, 39.47, 39.53, 39.29, 44.98, 39.37, 38.9, 38.56, 38.54, 39.56]
-712.615
-35.63075
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 352057, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.363084554672241, 'TIME_S_1KI': 0.029435814526262056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1223.9154741740226, 'W': 94.69, 'J_1KI': 3.4764696460346554, 'W_1KI': 0.2689621282917255, 'W_D': 59.05925, 'J_D': 763.3702605144381, 'W_D_1KI': 0.1677547953882468, 'J_D_1KI': 0.0004764989629186376}
+[41.15, 38.98, 39.75, 38.85, 39.88, 39.03, 39.18, 38.82, 39.8, 38.85]
+[96.13]
+13.135187864303589
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 362169, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.681929588317871, 'TIME_S_1KI': 0.029494323336116207, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.685609395504, 'W': 96.13}
+[41.15, 38.98, 39.75, 38.85, 39.88, 39.03, 39.18, 38.82, 39.8, 38.85, 40.18, 39.93, 38.85, 39.83, 38.86, 39.3, 38.99, 39.61, 39.19, 46.97]
+712.425
+35.621249999999996
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 362169, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.681929588317871, 'TIME_S_1KI': 0.029494323336116207, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.685609395504, 'W': 96.13, 'J_1KI': 3.4864541399056903, 'W_1KI': 0.2654285706396737, 'W_D': 60.50875, 'J_D': 794.7937986841798, 'W_D_1KI': 0.1670732448111241, 'J_D_1KI': 0.0004613129362566208}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json
index 9ddd17e..c41b46b 100644
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json
@@ -1 +1 @@
-{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21395, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.323282241821289, "TIME_S_1KI": 0.482509102211792, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2021.2716293263436, "W": 152.47, "J_1KI": 94.47401866447038, "W_1KI": 7.1264314092077585, "W_D": 115.044, "J_D": 1525.1208324537276, "W_D_1KI": 5.377144192568356, "J_D_1KI": 0.2513271415082195}
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21272, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.296250820159912, "TIME_S_1KI": 0.4840283386686683, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2004.567332353592, "W": 151.77, "J_1KI": 94.23501938480594, "W_1KI": 7.134731101918015, "W_D": 115.36950000000002, "J_D": 1523.7921252551082, "W_D_1KI": 5.423537984204589, "J_D_1KI": 0.2549613569107084}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output
index 50f6332..ff821c9 100644
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output
@@ -1,15 +1,15 @@
 ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '500000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.532757043838501}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5370402336120605}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 5, 8, ..., 2499994,
+tensor(crow_indices=tensor([ 0, 4, 10, ..., 2499988,
 2499995, 2500000]),
- col_indices=tensor([298854, 299868, 317882, ..., 208197, 239895,
- 321556]),
- values=tensor([0.0947, 0.1899, 0.7776, ..., 0.8480, 0.0740, 0.2913]),
+ col_indices=tensor([ 667, 84326, 231414, ..., 445492, 452435,
+ 478533]),
+ values=tensor([0.3723, 0.9059, 0.5582, ..., 0.5128, 0.0660, 0.1881]),
 size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr)
-tensor([0.2732, 0.7262, 0.3001, ..., 0.8229, 0.3388, 0.7233])
+tensor([0.0315, 0.2189, 0.8055, ..., 0.9902, 0.0196, 0.5860])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([500000, 500000])
@@ -17,20 +17,20 @@ Rows: 500000
 Size: 250000000000
 NNZ: 2500000
 Density: 1e-05
-Time: 0.532757043838501 seconds
+Time: 0.5370402336120605 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19708', '-ss', '500000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.672011375427246}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19551', '-ss', '500000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.650388717651367}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 5, 7, ..., 2499994,
- 2500000, 2500000]),
- col_indices=tensor([ 49185, 277910, 351023, ..., 230263, 378248,
- 487183]),
- values=tensor([0.7966, 0.8451, 0.5460, ..., 0.3570, 0.2848, 0.9857]),
+tensor(crow_indices=tensor([ 0, 4, 9, ..., 2499988,
+ 2499994, 2500000]),
+ col_indices=tensor([ 11262, 76750, 152870, ..., 221537, 283064,
+ 452441]),
+ values=tensor([0.8111, 0.5495, 0.0260, ..., 0.8118, 0.4893, 0.3789]),
 size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr)
-tensor([0.8196, 0.2368, 0.8865, ..., 0.6520, 0.2281, 0.7931])
+tensor([0.5436, 0.8281, 0.7063, ..., 0.1699, 0.2640, 0.5110])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([500000, 500000])
@@ -38,20 +38,20 @@ Rows: 500000
 Size: 250000000000
 NNZ: 2500000
 Density: 1e-05
-Time: 9.672011375427246 seconds
+Time: 9.650388717651367 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21395', '-ss', '500000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.323282241821289}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21272', '-ss', '500000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.296250820159912}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 4, 8, ..., 2499995,
+tensor(crow_indices=tensor([ 0, 6, 8, ..., 2499993,
 2499998, 2500000]),
- col_indices=tensor([ 50735, 77236, 160897, ..., 492852, 393041,
- 457835]),
- values=tensor([0.2461, 0.0110, 0.8932, ..., 0.0580, 0.2778, 0.4102]),
+ col_indices=tensor([ 13054, 157067, 258216, ..., 445117, 194165,
+ 431781]),
+ values=tensor([0.8472, 0.4724, 0.5562, ..., 0.8941, 0.8667, 0.3682]),
 size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr)
-tensor([0.4625, 0.6924, 0.9316, ..., 0.4127, 0.3248, 0.5422])
+tensor([0.2043, 0.9144, 0.3718, ..., 0.9024, 0.4544, 0.2083])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([500000, 500000])
@@ -77,13 +77,13 @@ Rows: 500000
 Size: 250000000000
 NNZ: 2500000
 Density: 1e-05
-Time: 10.323282241821289 seconds
+Time: 10.296250820159912 seconds
 
-[41.04, 39.37, 40.41, 40.48, 45.25, 40.28, 40.5, 39.5, 40.46, 39.64]
-[152.47]
-13.256848096847534
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.323282241821289, 'TIME_S_1KI': 0.482509102211792, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2021.2716293263436, 'W': 152.47}
-[41.04, 39.37, 40.41, 40.48, 45.25, 40.28, 40.5, 39.5, 40.46, 39.64, 42.68, 45.88, 39.43, 39.56, 39.31, 40.35, 46.4, 49.8, 39.76, 40.2]
-748.52
-37.426
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.323282241821289, 'TIME_S_1KI': 0.482509102211792, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2021.2716293263436, 'W': 152.47, 'J_1KI': 94.47401866447038, 'W_1KI': 7.1264314092077585, 'W_D': 115.044, 'J_D': 1525.1208324537276, 'W_D_1KI': 5.377144192568356, 'J_D_1KI': 0.2513271415082195}
+[40.04, 40.36, 39.54, 40.31, 47.07, 40.26, 39.56, 39.95, 39.57, 41.2]
+[151.77]
+13.207928657531738
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21272, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.296250820159912, 'TIME_S_1KI': 0.4840283386686683, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2004.567332353592, 'W': 151.77}
+[40.04, 40.36, 39.54, 40.31, 47.07, 40.26, 39.56, 39.95, 39.57, 41.2, 46.29, 39.48, 40.21, 39.42, 40.34, 39.68, 39.47, 39.18, 40.2, 39.29]
+728.01
+36.4005
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21272, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.296250820159912, 'TIME_S_1KI': 0.4840283386686683, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2004.567332353592, 'W': 151.77, 'J_1KI': 94.23501938480594, 'W_1KI': 7.134731101918015, 'W_D': 115.36950000000002, 'J_D': 1523.7921252551082, 'W_D_1KI': 5.423537984204589, 'J_D_1KI': 0.2549613569107084}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json
index f0cbadb..8fb8b3c 100644
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json
@@ -1 +1 @@
-{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 97887, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.967289686203003, "TIME_S_1KI": 0.11204030858237563, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1548.6584901952745, "W": 117.34, "J_1KI": 15.820880098432626, "W_1KI": 1.1987291468734356, "W_D": 81.498, "J_D": 1075.6141949372293, "W_D_1KI": 0.8325722516779552, "J_D_1KI": 0.008505442517167297}
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 91738, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.729677200317383, "TIME_S_1KI": 0.1169600078518976, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1421.8676947784425, "W": 116.68, "J_1KI": 15.49922272971334, "W_1KI": 1.2718829710697859, "W_D": 81.037, "J_D": 987.5205037860871, "W_D_1KI": 0.883352591074582, "J_D_1KI": 0.009629080545407377}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output
index a9ce226..e8b4fe3 100644
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.1396017074584961}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.13608026504516602}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
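Across every synthetic record in this diff, the MATRIX_* fields are related by the same arithmetic: MATRIX_SIZE is the square of MATRIX_ROWS, and MATRIX_NNZ is MATRIX_SIZE scaled by MATRIX_DENSITY (for the file above, 50000 * 50000 = 2500000000 cells and 2500000000 * 0.0001 = 250000 nonzeros). A minimal Python sketch of that relationship; the helper name and signature are hypothetical, not taken from spmv.py:

    def synthetic_matrix_fields(rows, density):
        # Reconstructed from the recorded JSON fields, e.g. rows=50000, density=0.0001.
        size = rows * rows            # MATRIX_SIZE: 50000**2 = 2500000000
        nnz = int(size * density)     # MATRIX_NNZ: 2500000000 * 0.0001 = 250000
        return {"MATRIX_SHAPE": [rows, rows], "MATRIX_ROWS": rows,
                "MATRIX_SIZE": size, "MATRIX_NNZ": nnz, "MATRIX_DENSITY": density}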
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 4, 11, ..., 249990, 249993,
+tensor(crow_indices=tensor([ 0, 3, 12, ..., 249989, 249998,
 250000]),
- col_indices=tensor([ 1901, 17696, 37644, ..., 22666, 31352, 38471]),
- values=tensor([0.6079, 0.0811, 0.7282, ..., 0.2667, 0.3886, 0.6657]),
+ col_indices=tensor([17323, 35611, 42973, ..., 47252, 2994, 12259]),
+ values=tensor([0.7287, 0.3464, 0.0193, ..., 0.7636, 0.2298, 0.3699]),
 size=(50000, 50000), nnz=250000, layout=torch.sparse_csr)
-tensor([0.5204, 0.6126, 0.8277, ..., 0.7159, 0.4461, 0.9246])
+tensor([0.4030, 0.5063, 0.1399, ..., 0.2219, 0.6631, 0.1030])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -16,19 +16,19 @@ Rows: 50000
 Size: 2500000000
 NNZ: 250000
 Density: 0.0001
-Time: 0.1396017074584961 seconds
+Time: 0.13608026504516602 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75213', '-ss', '50000', '-sd', '0.0001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.06783390045166}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '77160', '-ss', '50000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.564647197723389}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 4, 10, ..., 249992, 249996,
+tensor(crow_indices=tensor([ 0, 5, 10, ..., 249992, 249997,
 250000]),
- col_indices=tensor([ 3649, 15078, 16220, ..., 32895, 36388, 49599]),
- values=tensor([0.6393, 0.2992, 0.9532, ..., 0.0270, 0.3430, 0.6378]),
+ col_indices=tensor([ 7731, 9587, 38710, ..., 32177, 32664, 36235]),
+ values=tensor([0.0671, 0.3654, 0.2011, ..., 0.4377, 0.9797, 0.5456]),
 size=(50000, 50000), nnz=250000, layout=torch.sparse_csr)
-tensor([0.0844, 0.1224, 0.7905, ..., 0.3661, 0.3101, 0.4173])
+tensor([0.4354, 0.6450, 0.5949, ..., 0.4585, 0.1162, 0.0017])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -36,19 +36,19 @@ Rows: 50000
 Size: 2500000000
 NNZ: 250000
 Density: 0.0001
-Time: 8.06783390045166 seconds
+Time: 9.564647197723389 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '97887', '-ss', '50000', '-sd', '0.0001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.967289686203003}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '84705', '-ss', '50000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.694962739944458}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 6, 14, ..., 249992, 249996,
+tensor(crow_indices=tensor([ 0, 4, 11, ..., 249991, 249993,
 250000]),
- col_indices=tensor([ 9116, 23500, 25241, ..., 7305, 15035, 46474]),
- values=tensor([0.8636, 0.6633, 0.2645, ..., 0.7208, 0.8992, 0.1134]),
+ col_indices=tensor([19445, 22750, 27321, ..., 31731, 39710, 46259]),
+ values=tensor([0.4009, 0.2006, 0.6920, ..., 0.2884, 0.6470, 0.2171]),
 size=(50000, 50000), nnz=250000, layout=torch.sparse_csr)
-tensor([0.3603, 0.4772, 0.1653, ..., 0.3951, 0.3400, 0.6722])
+tensor([0.3109, 0.8999, 0.0558, ..., 0.1822, 0.8563, 0.0744])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -56,16 +56,19 @@ Rows: 50000
 Size: 2500000000
 NNZ: 250000
 Density: 0.0001
-Time: 10.967289686203003 seconds
+Time: 9.694962739944458 seconds
+
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '91738', '-ss', '50000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.729677200317383}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 6, 14, ..., 249992, 249996,
+tensor(crow_indices=tensor([ 0, 4, 7, ..., 249990, 249995,
 250000]),
- col_indices=tensor([ 9116, 23500, 25241, ..., 7305, 15035, 46474]),
- values=tensor([0.8636, 0.6633, 0.2645, ..., 0.7208, 0.8992, 0.1134]),
+ col_indices=tensor([20378, 29361, 44885, ..., 25194, 39048, 45113]),
+ values=tensor([0.6839, 0.7204, 0.3118, ..., 0.2854, 0.8671, 0.0496]),
 size=(50000, 50000), nnz=250000, layout=torch.sparse_csr)
-tensor([0.3603, 0.4772, 0.1653, ..., 0.3951, 0.3400, 0.6722])
+tensor([0.2159, 0.7026, 0.3184, ..., 0.1135, 0.4559, 0.6374])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -73,13 +76,30 @@ Rows: 50000
 Size: 2500000000
 NNZ: 250000
 Density: 0.0001
-Time: 10.967289686203003 seconds
+Time: 10.729677200317383 seconds
 
-[40.69, 39.7, 40.28, 39.02, 40.08, 39.07, 39.32, 38.93, 40.06, 39.15]
-[117.34]
-13.198044061660767
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 97887, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.967289686203003, 'TIME_S_1KI': 0.11204030858237563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1548.6584901952745, 'W': 117.34}
-[40.69, 39.7, 40.28, 39.02, 40.08, 39.07, 39.32, 38.93, 40.06, 39.15, 49.08, 39.91, 39.13, 39.81, 39.05, 39.67, 39.05, 40.19, 39.16, 39.9]
-716.84
-35.842
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 97887, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.967289686203003, 'TIME_S_1KI': 0.11204030858237563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1548.6584901952745, 'W': 117.34, 'J_1KI': 15.820880098432626, 'W_1KI': 1.1987291468734356, 'W_D': 81.498, 'J_D': 1075.6141949372293, 'W_D_1KI': 0.8325722516779552, 'J_D_1KI': 0.008505442517167297}
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 4, 7, ..., 249990, 249995,
+ 250000]),
+ col_indices=tensor([20378, 29361, 44885, ..., 25194, 39048, 45113]),
+ values=tensor([0.6839, 0.7204, 0.3118, ..., 0.2854, 0.8671, 0.0496]),
+ size=(50000, 50000), nnz=250000, layout=torch.sparse_csr)
+tensor([0.2159, 0.7026, 0.3184, ..., 0.1135, 0.4559, 0.6374])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([50000, 50000])
+Rows: 50000
+Size: 2500000000
+NNZ: 250000
+Density: 0.0001
+Time: 10.729677200317383 seconds
+
+[40.36, 39.52, 40.11, 39.22, 40.19, 39.14, 40.18, 39.47, 39.42, 39.16]
+[116.68]
+12.186044692993164
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91738, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.729677200317383, 'TIME_S_1KI': 0.1169600078518976, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.8676947784425, 'W': 116.68}
+[40.36, 39.52, 40.11, 39.22, 40.19, 39.14, 40.18, 39.47, 39.42, 39.16, 39.82, 39.18, 40.07, 39.1, 40.14, 39.1, 40.11, 39.11, 39.63, 39.0]
+712.86
+35.643
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91738, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.729677200317383, 'TIME_S_1KI': 0.1169600078518976, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.8676947784425, 'W': 116.68, 'J_1KI': 15.49922272971334, 'W_1KI': 1.2718829710697859, 'W_D': 81.037, 'J_D': 987.5205037860871, 'W_D_1KI': 0.883352591074582, 'J_D_1KI': 0.009629080545407377}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json
index dcf99f8..18b313c 100644
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json
@@ -1 +1 @@
-{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 47277, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.432827234268188, "TIME_S_1KI": 0.2206744766856651, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1956.9828476619723, "W": 146.36, "J_1KI": 41.393972706854754, "W_1KI": 3.095797110645769, "W_D": 110.02925000000002, "J_D": 1471.2035733199718, "W_D_1KI": 2.3273314719631117, "J_D_1KI": 0.04922756249260976}
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 46932, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.4467294216156, "TIME_S_1KI": 0.22259288804260632, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1943.0940554380418, "W": 146.55, "J_1KI": 41.40232795188873, "W_1KI": 3.122602914855536, "W_D": 110.75150000000002, "J_D": 1468.4447716195587, "W_D_1KI": 2.3598291144634795, "J_D_1KI": 0.05028187834448734}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output
index c946bac..c54c9fb 100644
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2981231212615967}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2965991497039795}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 43, 103, ..., 2499901,
- 2499951, 2500000]),
- col_indices=tensor([ 154, 1105, 2164, ..., 43048, 45641, 46786]),
- values=tensor([0.5353, 0.9585, 0.2831, ..., 0.0513, 0.1909, 0.0614]),
+tensor(crow_indices=tensor([ 0, 49, 105, ..., 2499896,
+ 2499948, 2500000]),
+ col_indices=tensor([ 1888, 3456, 5299, ..., 45108, 48153, 49689]),
+ values=tensor([0.2133, 0.4832, 0.5162, ..., 0.1550, 0.2104, 0.0398]),
 size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr)
-tensor([0.3993, 0.6905, 0.7348, ..., 0.6851, 0.9182, 0.5409])
+tensor([0.8558, 0.3690, 0.3196, ..., 0.7609, 0.2901, 0.1393])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -16,39 +16,19 @@ Rows: 50000
 Size: 2500000000
 NNZ: 2500000
 Density: 0.001
-Time: 0.2981231212615967 seconds
+Time: 0.2965991497039795 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35220', '-ss', '50000', '-sd', '0.001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.822157621383667}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35401', '-ss', '50000', '-sd', '0.001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.9200310707092285}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 48, 100, ..., 2499912,
- 2499953, 2500000]),
- col_indices=tensor([ 120, 161, 363, ..., 47642, 48044, 49939]),
- values=tensor([0.7949, 0.8676, 0.3054, ..., 0.9459, 0.0848, 0.8977]),
- size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr)
-tensor([0.4844, 0.7866, 0.3385, ..., 0.0837, 0.3382, 0.6328])
-Matrix Type: synthetic
-Matrix Format: csr
-Shape: torch.Size([50000, 50000])
-Rows: 50000
-Size: 2500000000
-NNZ: 2500000
-Density: 0.001
-Time: 7.822157621383667 seconds
-
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '47277', '-ss', '50000', '-sd', '0.001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.432827234268188}
-
-/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
- matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 62, 109, ..., 2499897,
+tensor(crow_indices=tensor([ 0, 50, 98, ..., 2499887,
 2499942, 2500000]),
- col_indices=tensor([ 2040, 2609, 3779, ..., 46933, 47654, 47998]),
- values=tensor([0.9101, 0.3119, 0.8580, ..., 0.1192, 0.4361, 0.9803]),
+ col_indices=tensor([ 1341, 6881, 6901, ..., 49243, 49539, 49603]),
+ values=tensor([0.6621, 0.7599, 0.1509, ..., 0.9636, 0.0388, 0.7851]),
 size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr)
-tensor([0.1061, 0.6227, 0.1589, ..., 0.5507, 0.9975, 0.5119])
+tensor([0.4875, 0.8207, 0.8190, ..., 0.4243, 0.1238, 0.4257])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -56,16 +36,19 @@ Rows: 50000
 Size: 2500000000
 NNZ: 2500000
 Density: 0.001
-Time: 10.432827234268188 seconds
+Time: 7.9200310707092285 seconds
+
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '46932', '-ss', '50000', '-sd', '0.001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.4467294216156}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 62, 109, ..., 2499897,
+tensor(crow_indices=tensor([ 0, 37, 82, ..., 2499888,
 2499942, 2500000]),
- col_indices=tensor([ 2040, 2609, 3779, ..., 46933, 47654, 47998]),
- values=tensor([0.9101, 0.3119, 0.8580, ..., 0.1192, 0.4361, 0.9803]),
+ col_indices=tensor([ 2117, 2189, 2263, ..., 47568, 48115, 49415]),
+ values=tensor([0.8006, 0.3321, 0.7026, ..., 0.2322, 0.3552, 0.1894]),
 size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr)
-tensor([0.1061, 0.6227, 0.1589, ..., 0.5507, 0.9975, 0.5119])
+tensor([0.9529, 0.8532, 0.0899, ..., 0.0711, 0.7399, 0.8898])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -73,13 +56,30 @@ Rows: 50000
 Size: 2500000000
 NNZ: 2500000
 Density: 0.001
-Time: 10.432827234268188 seconds
+Time: 10.4467294216156 seconds
 
-[40.03, 40.19, 39.31, 40.07, 40.41, 39.74, 44.15, 41.27, 39.36, 40.06]
-[146.36]
-13.371022462844849
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 47277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.432827234268188, 'TIME_S_1KI': 0.2206744766856651, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1956.9828476619723, 'W': 146.36}
-[40.03, 40.19, 39.31, 40.07, 40.41, 39.74, 44.15, 41.27, 39.36, 40.06, 40.74, 39.25, 45.03, 39.17, 39.46, 39.16, 40.2, 39.66, 39.96, 39.62]
-726.615
-36.33075
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 47277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.432827234268188, 'TIME_S_1KI': 0.2206744766856651, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1956.9828476619723, 'W': 146.36, 'J_1KI': 41.393972706854754, 'W_1KI': 3.095797110645769, 'W_D': 110.02925000000002, 'J_D': 1471.2035733199718, 'W_D_1KI': 2.3273314719631117, 'J_D_1KI': 0.04922756249260976}
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 37, 82, ..., 2499888,
+ 2499942, 2500000]),
+ col_indices=tensor([ 2117, 2189, 2263, ..., 47568, 48115, 49415]),
+ values=tensor([0.8006, 0.3321, 0.7026, ..., 0.2322, 0.3552, 0.1894]),
+ size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr)
+tensor([0.9529, 0.8532, 0.0899, ..., 0.0711, 0.7399, 0.8898])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([50000, 50000])
+Rows: 50000
+Size: 2500000000
+NNZ: 2500000
+Density: 0.001
+Time: 10.4467294216156 seconds
+
+[40.61, 39.27, 40.33, 39.32, 40.41, 39.18, 40.21, 39.47, 39.59, 40.71]
+[146.55]
+13.258915424346924
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46932, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.4467294216156, 'TIME_S_1KI': 0.22259288804260632, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1943.0940554380418, 'W': 146.55}
+[40.61, 39.27, 40.33, 39.32, 40.41, 39.18, 40.21, 39.47, 39.59, 40.71, 40.22, 40.14, 39.29, 40.17, 39.21, 40.37, 39.38, 39.54, 39.32, 40.0]
+715.9699999999999
+35.7985
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46932, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.4467294216156, 'TIME_S_1KI': 0.22259288804260632, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1943.0940554380418, 'W': 146.55, 'J_1KI': 41.40232795188873, 'W_1KI': 3.122602914855536, 'W_D': 110.75150000000002, 'J_D': 1468.4447716195587, 'W_D_1KI': 2.3598291144634795, 'J_D_1KI': 0.05028187834448734}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json
index a40aca5..a53c562 100644
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json
@@ -1 +1 @@
-{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 129830, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.498366355895996, "TIME_S_1KI": 0.08086240742429328, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1343.8986691188811, "W": 102.66, "J_1KI": 10.351218278663492, "W_1KI": 0.7907263344373412, "W_D": 67.04849999999999, "J_D": 877.7166366298197, "W_D_1KI": 0.5164330278055919, "J_D_1KI": 0.003977763443006947}
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 132622, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.695917844772339, "TIME_S_1KI": 0.08064964971703291, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1381.936604347229, "W": 102.52, "J_1KI": 10.420115850667528, "W_1KI": 0.7730240834853945, "W_D": 66.90350000000001, "J_D": 901.8376473755837, "W_D_1KI": 0.5044675845636472, "J_D_1KI": 0.0038038001580706607}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output
index 7b7017b..89e9f79 100644
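The derived energy fields appended to each record are consistent with simple arithmetic over the raw lines printed just before them (ten idle-wattage samples, the measured wattage, elapsed seconds, then twenty idle samples with their sum and mean). A reconstruction from the recorded numbers; the function below is hypothetical and only illustrates the relationships:

    def derived_fields(w, elapsed_s, iterations, idle_samples):
        # From the 50000/0.001 record above: w=146.55, elapsed_s=13.258915424346924,
        # iterations=46932, and sum(idle_samples)=715.97 over 20 readings.
        w_idle = sum(idle_samples) / len(idle_samples)  # 715.97 / 20 = 35.7985
        j = w * elapsed_s                               # ~1943.09 -> 'J'
        w_d = w - w_idle                                # 110.7515 -> 'W_D'
        j_d = w_d * elapsed_s                           # ~1468.44 -> 'J_D'
        per_1ki = lambda v: v / iterations * 1000       # the *_1KI normalizations
        return {"J": j, "W_D": w_d, "J_D": j_d, "J_1KI": per_1ki(j),
                "W_1KI": per_1ki(w), "W_D_1KI": per_1ki(w_d), "J_D_1KI": per_1ki(j_d)}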
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output
@@ -1,13 +1,32 @@
 ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.11773824691772461}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.13474559783935547}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24999, 25000]),
+ col_indices=tensor([43476, 3093, 41733, ..., 42921, 16006, 37299]),
+ values=tensor([0.8834, 0.6775, 0.5620, ..., 0.7889, 0.3307, 0.4663]),
+ size=(50000, 50000), nnz=25000, layout=torch.sparse_csr)
+tensor([0.1655, 0.9515, 0.3152, ..., 0.5133, 0.8067, 0.9282])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([50000, 50000])
+Rows: 50000
+Size: 2500000000
+NNZ: 25000
+Density: 1e-05
+Time: 0.13474559783935547 seconds
+
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '77924', '-ss', '50000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.767163991928101}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]),
- col_indices=tensor([16845, 2751, 33930, ..., 33536, 38018, 30474]),
- values=tensor([0.6858, 0.5470, 0.3190, ..., 0.3110, 0.3011, 0.6040]),
+ col_indices=tensor([35071, 44060, 31911, ..., 37021, 35082, 17458]),
+ values=tensor([0.6370, 0.7388, 0.5924, ..., 0.3636, 0.5677, 0.2522]),
 size=(50000, 50000), nnz=25000, layout=torch.sparse_csr)
-tensor([0.7348, 0.5937, 0.8612, ..., 0.8920, 0.9109, 0.1161])
+tensor([0.8033, 0.0482, 0.8958, ..., 0.4016, 0.2560, 0.2344])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -15,18 +34,18 @@ Rows: 50000
 Size: 2500000000
 NNZ: 25000
 Density: 1e-05
-Time: 0.11773824691772461 seconds
+Time: 6.767163991928101 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '89180', '-ss', '50000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.942249059677124}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '120907', '-ss', '50000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.572461605072021}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 0, 2, ..., 25000, 25000, 25000]),
- col_indices=tensor([ 5133, 25494, 8495, ..., 18153, 14682, 27268]),
- values=tensor([0.7177, 0.6433, 0.0497, ..., 0.6766, 0.5365, 0.3286]),
+tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]),
+ col_indices=tensor([ 3082, 46101, 46713, ..., 40768, 36655, 17054]),
+ values=tensor([0.2693, 0.1416, 0.6603, ..., 0.5561, 0.2474, 0.5454]),
 size=(50000, 50000), nnz=25000, layout=torch.sparse_csr)
-tensor([0.6426, 0.1118, 0.3197, ..., 0.9296, 0.1873, 0.3702])
+tensor([0.5277, 0.5906, 0.6144, ..., 0.6636, 0.4334, 0.5688])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -34,19 +53,18 @@ Rows: 50000
 Size: 2500000000
 NNZ: 25000
 Density: 1e-05
-Time: 7.942249059677124 seconds
+Time: 9.572461605072021 seconds
 
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '117899', '-ss', '50000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.535074234008789}
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '132622', '-ss', '50000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.695917844772339}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24998, 25000]),
- col_indices=tensor([ 1468, 1704, 43281, ..., 3197, 24132, 30286]),
- values=tensor([1.4228e-01, 5.9740e-01, 9.5210e-06, ...,
- 2.4125e-01, 6.2955e-01, 4.9169e-01]),
+tensor(crow_indices=tensor([ 0, 1, 2, ..., 24998, 24998, 25000]),
+ col_indices=tensor([ 1978, 29423, 7022, ..., 46456, 14629, 46564]),
+ values=tensor([0.3729, 0.4306, 0.6677, ..., 0.7805, 0.6392, 0.2909]),
 size=(50000, 50000), nnz=25000, layout=torch.sparse_csr)
-tensor([0.8788, 0.8743, 0.0964, ..., 0.0391, 0.4204, 0.2909])
+tensor([0.9195, 0.7845, 0.1112, ..., 0.9886, 0.0043, 0.8706])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -54,18 +72,15 @@ Rows: 50000
 Size: 2500000000
 NNZ: 25000
 Density: 1e-05
-Time: 9.535074234008789 seconds
-
-['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '129830', '-ss', '50000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.498366355895996}
+Time: 10.695917844772339 seconds
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
 matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]),
- col_indices=tensor([16477, 45779, 22583, ..., 30055, 21515, 45820]),
- values=tensor([0.8893, 0.7790, 0.5329, ..., 0.5529, 0.2667, 0.0404]),
+tensor(crow_indices=tensor([ 0, 1, 2, ..., 24998, 24998, 25000]),
+ col_indices=tensor([ 1978, 29423, 7022, ..., 46456, 14629, 46564]),
+ values=tensor([0.3729, 0.4306, 0.6677, ..., 0.7805, 0.6392, 0.2909]),
 size=(50000, 50000), nnz=25000, layout=torch.sparse_csr)
-tensor([0.0469, 0.9963, 0.7558, ..., 0.9652, 0.6676, 0.7778])
+tensor([0.9195, 0.7845, 0.1112, ..., 0.9886, 0.0043, 0.8706])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([50000, 50000])
@@ -73,29 +88,13 @@ Rows: 50000
 Size: 2500000000
 NNZ: 25000
 Density: 1e-05
-Time: 10.498366355895996 seconds
+Time: 10.695917844772339 seconds
 
-/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
- matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]),
- col_indices=tensor([16477, 45779, 22583, ..., 30055, 21515, 45820]),
- values=tensor([0.8893, 0.7790, 0.5329, ..., 0.5529, 0.2667, 0.0404]),
- size=(50000, 50000), nnz=25000, layout=torch.sparse_csr)
-tensor([0.0469, 0.9963, 0.7558, ..., 0.9652, 0.6676, 0.7778])
-Matrix Type: synthetic
-Matrix Format: csr
-Shape: torch.Size([50000, 50000])
-Rows: 50000
-Size: 2500000000
-NNZ: 25000
-Density: 1e-05
-Time: 10.498366355895996 seconds
-
-[41.64, 39.36, 40.21, 39.07, 40.05, 39.18, 39.19, 39.18, 40.04, 39.2]
-[102.66]
-13.090772151947021
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 129830, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.498366355895996, 'TIME_S_1KI': 0.08086240742429328, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1343.8986691188811, 'W': 102.66}
-[41.64, 39.36, 40.21, 39.07, 40.05, 39.18, 39.19, 39.18, 40.04, 39.2, 39.71, 39.93, 38.98, 39.86, 39.33, 39.67, 39.03, 39.93, 39.08, 39.73]
-712.23
-35.6115
-{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 129830, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.498366355895996, 'TIME_S_1KI': 0.08086240742429328, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1343.8986691188811, 'W': 102.66, 'J_1KI': 10.351218278663492, 'W_1KI': 0.7907263344373412, 'W_D': 67.04849999999999, 'J_D': 877.7166366298197, 'W_D_1KI': 0.5164330278055919, 'J_D_1KI': 0.003977763443006947}
+[40.91, 39.21, 40.28, 39.06, 40.18, 39.24, 39.39, 39.11, 40.12, 39.03]
+[102.52]
+13.4796781539917
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 132622, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.695917844772339, 'TIME_S_1KI': 0.08064964971703291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1381.936604347229, 'W': 102.52}
+[40.91, 39.21, 40.28, 39.06, 40.18, 39.24, 39.39, 39.11, 40.12, 39.03, 40.67, 39.4, 40.19, 38.91, 39.61, 38.93, 39.87, 39.08, 40.01, 38.87]
+712.3299999999999
+35.616499999999995
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 132622, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.695917844772339, 'TIME_S_1KI': 0.08064964971703291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1381.936604347229, 'W': 102.52, 'J_1KI': 10.420115850667528, 'W_1KI': 0.7730240834853945, 'W_D': 66.90350000000001, 'J_D': 901.8376473755837, 'W_D_1KI': 0.5044675845636472, 'J_D_1KI': 0.0038038001580706607}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json
new file mode 100644
index 0000000..c740a6d
--- /dev/null
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 450692, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661210536956787, "TIME_S_1KI": 0.023655202526241394, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1274.7642656326295, "W": 94.48, "J_1KI": 2.82845993634817, "W_1KI": 0.2096331863001784, "W_D": 59.36250000000001, "J_D": 800.9440486729146, "W_D_1KI": 0.13171411962049473, "J_D_1KI": 0.00029224863015206556}
diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output
new file mode 100644
index 0000000..eed9694
--- /dev/null
+++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output
@@ -0,0 +1,81 @@
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05054283142089844}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 2, 2, ..., 2500, 2500, 2500]),
+ col_indices=tensor([ 483, 2169, 757, ..., 173, 4439, 4656]),
+ values=tensor([0.9876, 0.6258, 0.5982, ..., 0.3562, 0.6626, 0.2988]),
+ size=(5000, 5000), nnz=2500, layout=torch.sparse_csr)
+tensor([0.5486, 0.1022, 0.5660, ..., 0.0025, 0.4692, 0.8005])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([5000, 5000])
+Rows: 5000
+Size: 25000000
+NNZ: 2500
+Density: 0.0001
+Time: 0.05054283142089844 seconds
+
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '207744', '-ss', '5000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.839913845062256}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]),
+ col_indices=tensor([1064, 259, 704, ..., 2037, 4830, 899]),
+ values=tensor([0.7873, 0.2357, 0.4656, ..., 0.3402, 0.5396, 0.7236]),
+ size=(5000, 5000), nnz=2500, layout=torch.sparse_csr)
+tensor([0.3390, 0.6218, 0.4185, ..., 0.9245, 0.2892, 0.5586])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([5000, 5000])
+Rows: 5000
+Size: 25000000
+NNZ: 2500
+Density: 0.0001
+Time: 4.839913845062256 seconds
+
+['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '450692', '-ss', '5000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661210536956787}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2499, 2500]),
+ col_indices=tensor([2769, 4978, 3269, ..., 2907, 4470, 1850]),
+ values=tensor([0.1814, 0.5969, 0.2629, ..., 0.3883, 0.1478, 0.5451]),
+ size=(5000, 5000), nnz=2500, layout=torch.sparse_csr)
+tensor([0.9019, 0.2172, 0.0888, ..., 0.3698, 0.8940, 0.4050])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([5000, 5000])
+Rows: 5000
+Size: 25000000
+NNZ: 2500
+Density: 0.0001
+Time: 10.661210536956787 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2499, 2500]), + col_indices=tensor([2769, 4978, 3269, ..., 2907, 4470, 1850]), + values=tensor([0.1814, 0.5969, 0.2629, ..., 0.3883, 0.1478, 0.5451]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.9019, 0.2172, 0.0888, ..., 0.3698, 0.8940, 0.4050]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.661210536956787 seconds + +[39.19, 39.19, 38.63, 39.33, 38.89, 39.39, 38.57, 39.43, 38.51, 39.19] +[94.48] +13.492424488067627 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 450692, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661210536956787, 'TIME_S_1KI': 0.023655202526241394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.7642656326295, 'W': 94.48} +[39.19, 39.19, 38.63, 39.33, 38.89, 39.39, 38.57, 39.43, 38.51, 39.19, 39.32, 38.88, 39.47, 38.52, 39.48, 38.52, 39.58, 38.58, 39.31, 38.44] +702.3499999999999 +35.11749999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 450692, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661210536956787, 'TIME_S_1KI': 0.023655202526241394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.7642656326295, 'W': 94.48, 'J_1KI': 2.82845993634817, 'W_1KI': 0.2096331863001784, 'W_D': 59.36250000000001, 'J_D': 800.9440486729146, 'W_D_1KI': 0.13171411962049473, 'J_D_1KI': 0.00029224863015206556} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..5a45f3d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 249519, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.236942052841187, "TIME_S_1KI": 0.04102670358907012, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1222.1109838342666, "W": 97.13999999999999, "J_1KI": 4.897867432276767, "W_1KI": 0.3893090305748259, "W_D": 61.76424999999999, "J_D": 777.0513520000576, "W_D_1KI": 0.24753325398065876, "J_D_1KI": 0.0009920417041614415} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..bf3e9ad --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.05724024772644043} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 15, ..., 24983, 24992, 25000]), + col_indices=tensor([ 471, 1370, 1845, ..., 3191, 3518, 3659]), + values=tensor([0.0299, 0.9557, 0.6054, ..., 0.0635, 0.2604, 0.4528]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0205, 0.7752, 0.1498, ..., 0.2089, 0.1619, 0.7193]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.05724024772644043 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '183437', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.719191074371338} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 8, ..., 24996, 24997, 25000]), + col_indices=tensor([1493, 2121, 2213, ..., 623, 2347, 4713]), + values=tensor([0.6456, 0.4495, 0.4360, ..., 0.5144, 0.5794, 0.1984]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.2703, 0.0672, 0.3072, ..., 0.2566, 0.5122, 0.5785]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 7.719191074371338 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '249519', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.236942052841187} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 24991, 24997, 25000]), + col_indices=tensor([ 752, 886, 972, ..., 802, 1974, 3630]), + values=tensor([0.4437, 0.0647, 0.4607, ..., 0.1209, 0.0125, 0.5794]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9393, 0.0560, 0.5479, ..., 0.4533, 0.0776, 0.5900]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.236942052841187 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
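A note on the iteration counts in these runs: every output file shows the same calibration pattern, starting at 1,000 iterations and rescaling until the measured time crosses the 10-second baseline (here 1,000 iterations took 0.0572 s, then 183,437 took 7.72 s, then 249,519 took 10.24 s). The multiplier that reproduces these exact counts is 10.5 / elapsed, i.e. a 5% margin over BASELINE_TIME_S; that factor is inferred from the numbers, not confirmed by code in this diff, and run_spmv below is a hypothetical stand-in for one benchmark invocation.

TARGET_S = 10     # BASELINE_TIME_S in the JSON records
MARGIN = 1.05     # assumed: int(1000 * 10.5 / 0.05724024772644043) == 183437, as logged

_measured = iter([0.05724024772644043, 7.719191074371338, 10.236942052841187])

def run_spmv(iterations):
    # Hypothetical stand-in: replays the TIME_S values recorded in this file.
    return next(_measured)

iterations = 1000
elapsed = run_spmv(iterations)                         # 0.0572 s above
while elapsed < TARGET_S:
    iterations = int(iterations * TARGET_S * MARGIN / elapsed)
    elapsed = run_spmv(iterations)                     # 183437 -> 7.72 s, 249519 -> 10.24 s
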
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 24991, 24997, 25000]), + col_indices=tensor([ 752, 886, 972, ..., 802, 1974, 3630]), + values=tensor([0.4437, 0.0647, 0.4607, ..., 0.1209, 0.0125, 0.5794]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9393, 0.0560, 0.5479, ..., 0.4533, 0.0776, 0.5900]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.236942052841187 seconds + +[40.22, 39.73, 39.42, 38.78, 39.93, 38.71, 40.68, 38.8, 39.45, 38.55] +[97.14] +12.580924272537231 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 249519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.236942052841187, 'TIME_S_1KI': 0.04102670358907012, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1222.1109838342666, 'W': 97.13999999999999} +[40.22, 39.73, 39.42, 38.78, 39.93, 38.71, 40.68, 38.8, 39.45, 38.55, 40.16, 39.21, 38.74, 39.25, 39.41, 39.27, 38.96, 39.33, 38.96, 38.84] +707.515 +35.37575 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 249519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.236942052841187, 'TIME_S_1KI': 0.04102670358907012, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1222.1109838342666, 'W': 97.13999999999999, 'J_1KI': 4.897867432276767, 'W_1KI': 0.3893090305748259, 'W_D': 61.76424999999999, 'J_D': 777.0513520000576, 'W_D_1KI': 0.24753325398065876, 'J_D_1KI': 0.0009920417041614415} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..e6c8fdb --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 146173, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.55378007888794, "TIME_S_1KI": 0.07220061214374707, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1340.7953616142272, "W": 116.1, "J_1KI": 9.17266089916898, "W_1KI": 0.7942643306219342, "W_D": 80.21499999999999, "J_D": 926.372953763008, "W_D_1KI": 0.5487675562518385, "J_D_1KI": 0.0037542333827166336} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..021f115 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.09166121482849121} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 47, 94, ..., 249894, 249942, + 250000]), + col_indices=tensor([ 119, 293, 345, ..., 4744, 4847, 4998]), + values=tensor([0.2600, 0.0492, 0.0782, ..., 0.6942, 0.7814, 0.7527]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8315, 0.0983, 0.7447, ..., 0.4668, 0.9945, 0.1855]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.09166121482849121 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '114552', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.2285475730896} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 113, ..., 249897, 249950, + 250000]), + col_indices=tensor([ 60, 61, 88, ..., 4754, 4809, 4933]), + values=tensor([0.8655, 0.3309, 0.5749, ..., 0.8443, 0.2705, 0.0665]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6570, 0.9775, 0.7976, ..., 0.2365, 0.6987, 0.3821]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 8.2285475730896 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '146173', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.55378007888794} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 109, ..., 249914, 249951, + 250000]), + col_indices=tensor([ 51, 99, 229, ..., 4435, 4821, 4904]), + values=tensor([0.7585, 0.4725, 0.0422, ..., 0.0029, 0.1086, 0.7072]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3786, 0.1898, 0.9439, ..., 0.4562, 0.4771, 0.6918]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.55378007888794 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
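The unlabeled lines at the end of each output file are the raw power data: a list of idle wattage samples, a one-element list holding the mean wattage under load, the wall-clock duration of the final run, and then the assembled record (the bare sum and mean of the idle samples are also printed). The derived JSON fields follow from simple arithmetic, verified against the 0.001-density record above (J = 97.14 W x 12.5809 s = 1222.11; W_D = 97.14 - 35.37575 = 61.764). One caveat: in every record here, J_D_1KI equals W_D_1KI divided by kilo-iterations rather than J_D divided by kilo-iterations, so it is not on the same footing as J_1KI. A sketch of the relationships, with field names taken from the records:

def derive(record, idle_samples, load_watts, duration_s):
    kilo_iters = record["ITERATIONS"] / 1000
    record["W"] = load_watts                       # mean wattage under load
    record["J"] = load_watts * duration_s          # energy over the measured run
    record["W_D"] = load_watts - sum(idle_samples) / len(idle_samples)
    record["J_D"] = record["W_D"] * duration_s     # energy above idle
    record["TIME_S_1KI"] = record["TIME_S"] / kilo_iters
    record["J_1KI"] = record["J"] / kilo_iters
    record["W_1KI"] = record["W"] / kilo_iters
    record["W_D_1KI"] = record["W_D"] / kilo_iters
    # As logged: W_D_1KI (not J_D) is divided by kilo-iterations again.
    record["J_D_1KI"] = record["W_D_1KI"] / kilo_iters
    return record
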
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 109, ..., 249914, 249951, + 250000]), + col_indices=tensor([ 51, 99, 229, ..., 4435, 4821, 4904]), + values=tensor([0.7585, 0.4725, 0.0422, ..., 0.0029, 0.1086, 0.7072]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3786, 0.1898, 0.9439, ..., 0.4562, 0.4771, 0.6918]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.55378007888794 seconds + +[39.38, 39.69, 39.36, 39.25, 38.8, 39.79, 38.95, 39.94, 38.86, 45.03] +[116.1] +11.548624992370605 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 146173, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.55378007888794, 'TIME_S_1KI': 0.07220061214374707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1340.7953616142272, 'W': 116.1} +[39.38, 39.69, 39.36, 39.25, 38.8, 39.79, 38.95, 39.94, 38.86, 45.03, 39.59, 39.72, 39.05, 38.89, 38.89, 39.88, 38.77, 40.8, 45.19, 39.74] +717.7 +35.885000000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 146173, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.55378007888794, 'TIME_S_1KI': 0.07220061214374707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1340.7953616142272, 'W': 116.1, 'J_1KI': 9.17266089916898, 'W_1KI': 0.7942643306219342, 'W_D': 80.21499999999999, 'J_D': 926.372953763008, 'W_D_1KI': 0.5487675562518385, 'J_D_1KI': 0.0037542333827166336} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..4f9e011 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 92778, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.476318120956421, "TIME_S_1KI": 0.11291812844592922, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1732.4749909281732, "W": 131.59, "J_1KI": 18.6733384091937, "W_1KI": 1.4183319321390848, "W_D": 96.0545, "J_D": 1264.6251160126926, "W_D_1KI": 1.0353154842742893, "J_D_1KI": 0.011159062323765217} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..85eae8d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.1557161808013916} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 231, 495, ..., 1249487, + 1249744, 1250000]), + col_indices=tensor([ 9, 30, 58, ..., 4828, 4865, 4971]), + values=tensor([0.7438, 0.5258, 0.4698, ..., 0.4344, 0.2594, 0.0033]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.1880, 0.8169, 0.5226, ..., 0.2752, 0.9006, 0.0611]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.1557161808013916 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '67430', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 7.631251096725464} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 248, 518, ..., 1249509, + 1249753, 1250000]), + col_indices=tensor([ 31, 45, 102, ..., 4944, 4977, 4981]), + values=tensor([0.8150, 0.4433, 0.0676, ..., 0.5361, 0.0056, 0.9882]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.0156, 0.0219, 0.6064, ..., 0.7934, 0.6259, 0.0204]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 7.631251096725464 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '92778', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.476318120956421} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 269, 520, ..., 1249470, + 1249738, 1250000]), + col_indices=tensor([ 32, 37, 46, ..., 4950, 4963, 4989]), + values=tensor([0.4206, 0.9091, 0.7478, ..., 0.6711, 0.2779, 0.9141]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6953, 0.1111, 0.6307, ..., 0.1029, 0.6511, 0.8226]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.476318120956421 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
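The idle samples themselves follow a fixed protocol: the first list in each file holds ten pre-run samples, and the 20-element list that follows is those same ten plus ten taken after the run (the first ten entries of the long list match the short list exactly). Together with BASELINE_TIME_S = 10 and BASELINE_DELAY_S = 10 in every record, and the 10_10_10 infix in the file names, this suggests ten seconds of roughly once-per-second sampling on each side after a ten-second settle. A sketch under those assumptions; sample_watts is a hypothetical meter readout:

import time

def sample_watts():
    return 0.0  # stub: the actual meter readout is not part of this diff

def idle_watts(delay_s=10, samples=10):
    time.sleep(delay_s)                  # BASELINE_DELAY_S: let the machine settle
    readings = []
    for _ in range(samples):             # BASELINE_TIME_S at ~1 sample/s
        readings.append(sample_watts())
        time.sleep(1)
    return readings

pre = idle_watts()       # the first 10-element list in each output file
# ... the timed spmv run and load-power sampling happen here ...
post = idle_watts()      # second half of the 20-element list
idle_mean = sum(pre + post) / 20   # the bare mean printed near the end
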
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 269, 520, ..., 1249470, + 1249738, 1250000]), + col_indices=tensor([ 32, 37, 46, ..., 4950, 4963, 4989]), + values=tensor([0.4206, 0.9091, 0.7478, ..., 0.6711, 0.2779, 0.9141]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6953, 0.1111, 0.6307, ..., 0.1029, 0.6511, 0.8226]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.476318120956421 seconds + +[40.71, 39.95, 38.97, 39.83, 39.79, 39.14, 38.93, 39.78, 39.42, 39.71] +[131.59] +13.165704011917114 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 92778, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.476318120956421, 'TIME_S_1KI': 0.11291812844592922, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1732.4749909281732, 'W': 131.59} +[40.71, 39.95, 38.97, 39.83, 39.79, 39.14, 38.93, 39.78, 39.42, 39.71, 40.83, 39.04, 39.95, 38.93, 39.14, 39.24, 39.86, 38.92, 39.75, 38.89] +710.71 +35.5355 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 92778, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.476318120956421, 'TIME_S_1KI': 0.11291812844592922, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1732.4749909281732, 'W': 131.59, 'J_1KI': 18.6733384091937, 'W_1KI': 1.4183319321390848, 'W_D': 96.0545, 'J_D': 1264.6251160126926, 'W_D_1KI': 1.0353154842742893, 'J_D_1KI': 0.011159062323765217} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..d3bf1fe --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 52513, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.26560354232788, "TIME_S_1KI": 0.19548689928832635, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1794.3579230117798, "W": 136.24, "J_1KI": 34.16978506297069, "W_1KI": 2.594405194904119, "W_D": 100.32050000000001, "J_D": 1321.2777746293546, "W_D_1KI": 1.9103936168186928, "J_D_1KI": 0.036379441601483306} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..ca278ce --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.2491617202758789} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 513, 1030, ..., 2499018, + 2499503, 2500000]), + col_indices=tensor([ 5, 7, 9, ..., 4974, 4988, 4992]), + values=tensor([0.9314, 0.8722, 0.2786, ..., 0.3461, 0.5001, 0.4531]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5860, 0.7303, 0.0322, ..., 0.3067, 0.0639, 0.6907]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.2491617202758789 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '42141', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.425995349884033} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 480, 969, ..., 2498991, + 2499495, 2500000]), + col_indices=tensor([ 1, 8, 15, ..., 4990, 4995, 4997]), + values=tensor([0.6450, 0.7913, 0.7669, ..., 0.2675, 0.7315, 0.7922]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8872, 0.3458, 0.7222, ..., 0.3185, 0.9459, 0.1327]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 8.425995349884033 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52513', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.26560354232788} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 496, 1000, ..., 2499050, + 2499547, 2500000]), + col_indices=tensor([ 1, 8, 12, ..., 4944, 4951, 4977]), + values=tensor([0.2566, 0.4868, 0.9344, ..., 0.5912, 0.8684, 0.6618]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5960, 0.0213, 0.1088, ..., 0.8621, 0.3601, 0.4544]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.26560354232788 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
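For reference, the measured kernel itself: the warning quoted throughout pins the CSR conversion to spmv.py line 75, and each run prints the CSR matrix and a dense random vector before reporting TIME_S for the full iteration count. The sketch below reconstructs that shape for the 0.1-density run above; the matrix generator and the exact multiply call are not shown in this diff, so both are approximations.

import time
import torch

size, density, iterations = 5000, 0.1, 52513        # values from the run above
nnz = int(size * size * density)                     # 2,500,000, as logged

# Approximate generator: random coordinates with uniform values. coalesce()
# merges duplicate coordinates, so the resulting nnz can land slightly under
# the target; the logs always report the exact product, so the real generator
# presumably avoids collisions.
indices = torch.randint(0, size, (2, nnz))
matrix = torch.sparse_coo_tensor(indices, torch.rand(nnz), (size, size)).coalesce()
matrix = matrix.to_sparse_csr().type(torch.float32)  # spmv.py:75, per the warning
vector = torch.rand(size)                            # the dense tensor printed above

start = time.time()
for _ in range(iterations):
    result = matrix @ vector                         # CSR x dense SpMV
elapsed = time.time() - start                        # reported as TIME_S
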
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 496, 1000, ..., 2499050, + 2499547, 2500000]), + col_indices=tensor([ 1, 8, 12, ..., 4944, 4951, 4977]), + values=tensor([0.2566, 0.4868, 0.9344, ..., 0.5912, 0.8684, 0.6618]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5960, 0.0213, 0.1088, ..., 0.8621, 0.3601, 0.4544]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.26560354232788 seconds + +[47.36, 40.59, 40.08, 39.29, 39.8, 40.2, 40.01, 39.07, 40.08, 38.96] +[136.24] +13.170566082000732 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52513, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.26560354232788, 'TIME_S_1KI': 0.19548689928832635, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1794.3579230117798, 'W': 136.24} +[47.36, 40.59, 40.08, 39.29, 39.8, 40.2, 40.01, 39.07, 40.08, 38.96, 40.34, 40.06, 39.51, 39.55, 39.19, 39.83, 39.16, 39.75, 38.95, 39.88] +718.3900000000001 +35.919500000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52513, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.26560354232788, 'TIME_S_1KI': 0.19548689928832635, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1794.3579230117798, 'W': 136.24, 'J_1KI': 34.16978506297069, 'W_1KI': 2.594405194904119, 'W_D': 100.32050000000001, 'J_D': 1321.2777746293546, 'W_D_1KI': 1.9103936168186928, 'J_D_1KI': 0.036379441601483306} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..b172c0c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 470922, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.104466915130615, "TIME_S_1KI": 0.021456773977708867, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1145.4606072449685, "W": 92.79, "J_1KI": 2.4323786258551703, "W_1KI": 0.19703900008918676, "W_D": 57.138000000000005, "J_D": 705.3489403681756, "W_D_1KI": 0.12133219514059655, "J_D_1KI": 0.0002576481777037313} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..263466b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,356 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.06183266639709473} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([2604, 880, 70, 3579, 4688, 1415, 4052, 2136, 2789, + 1920, 1039, 1558, 2117, 2959, 828, 201, 2786, 2764, + 2257, 277, 2288, 309, 1119, 4553, 992, 4344, 1852, + 1654, 3440, 2337, 4465, 3747, 865, 1053, 722, 4388, + 1118, 2434, 2479, 2179, 2623, 1327, 1850, 4354, 1080, + 294, 3733, 2629, 4844, 2052, 338, 3690, 2779, 4781, + 442, 500, 2501, 2111, 2134, 4050, 4965, 2490, 1539, + 1728, 3791, 2480, 429, 85, 2238, 4139, 1911, 2702, + 1667, 623, 834, 958, 2640, 639, 3527, 4275, 2167, + 2457, 991, 806, 4483, 513, 3720, 1136, 1176, 1064, + 771, 912, 1234, 1122, 4461, 4277, 1464, 345, 1997, + 2256, 2917, 38, 2975, 472, 2189, 2640, 491, 245, + 718, 3839, 2523, 240, 4832, 1434, 3727, 2402, 3795, + 977, 2914, 3289, 1194, 1229, 3616, 4441, 1900, 4483, + 4227, 4209, 4021, 4316, 794, 1149, 4287, 2054, 4565, + 4842, 69, 93, 2768, 2785, 2781, 1662, 4565, 3083, + 2932, 2437, 4078, 1005, 2493, 4749, 4500, 4776, 2110, + 3771, 1500, 4456, 4652, 2281, 3889, 3267, 2338, 1779, + 1663, 1964, 223, 2535, 4215, 2012, 431, 2610, 2606, + 1802, 4804, 2967, 365, 3887, 1133, 2945, 28, 647, + 466, 4656, 1939, 1716, 1723, 1159, 2034, 3057, 1288, + 284, 673, 4283, 506, 1331, 614, 631, 4195, 2134, + 2612, 1089, 4012, 2128, 736, 1710, 4895, 1258, 2802, + 4181, 1214, 4441, 4549, 2923, 3989, 2826, 3613, 1217, + 1556, 110, 4249, 222, 1573, 3450, 1707, 4825, 3455, + 279, 1371, 3150, 620, 486, 544, 4512, 3097, 2958, + 3135, 21, 1955, 802, 3984, 2259, 2773, 1786, 4464, + 4164, 2686, 4882, 4392, 2240, 1975, 2258]), + values=tensor([0.5027, 0.7084, 0.3487, 0.0753, 0.4164, 0.9980, 0.6580, + 0.4935, 0.3902, 0.5664, 0.2658, 0.3783, 0.8206, 0.5243, + 0.7985, 0.9823, 0.7694, 0.1060, 0.0192, 0.9550, 0.7866, + 0.3204, 0.1228, 0.4101, 0.8052, 0.9732, 0.1676, 0.7257, + 0.3426, 0.4203, 0.8249, 0.6182, 0.8414, 0.1007, 0.5404, + 0.5322, 0.6815, 0.5471, 0.5528, 0.9304, 0.5952, 0.6825, + 0.1470, 0.9592, 0.1633, 0.8148, 0.7106, 0.4684, 0.6378, + 0.2787, 0.1559, 0.9606, 0.6114, 0.8631, 0.8476, 0.0374, + 0.0974, 0.1508, 0.6160, 0.2538, 0.9193, 0.3221, 0.6792, + 0.1039, 0.5088, 0.3858, 0.8567, 0.5930, 0.1245, 0.9954, + 0.1659, 0.1382, 0.3631, 0.0415, 0.2608, 0.5523, 0.3431, + 0.5922, 0.9276, 0.2417, 0.9820, 0.0941, 0.0465, 0.6122, + 0.3473, 0.8672, 0.7451, 0.4632, 0.6761, 0.3844, 0.6143, + 0.9600, 0.7204, 0.0168, 0.7425, 0.2772, 0.4866, 0.2756, + 0.3148, 0.2142, 0.2884, 0.7150, 0.6972, 0.0578, 0.3403, + 0.6794, 0.7790, 0.6966, 0.8236, 0.6083, 0.5211, 0.6301, + 0.9543, 0.5553, 0.9115, 0.9237, 0.2270, 0.6441, 0.7009, + 0.1070, 0.9702, 0.2577, 0.6283, 0.2972, 0.6911, 0.1725, + 0.0282, 0.9157, 0.7996, 0.8026, 0.3516, 0.8308, 0.1003, + 0.0248, 0.7281, 0.0565, 0.4669, 0.2079, 0.4864, 0.2943, + 0.0681, 0.8545, 0.6221, 0.1251, 0.9854, 0.1397, 0.1128, + 0.9416, 0.0256, 0.6346, 0.9861, 0.8618, 0.7250, 0.4296, + 0.7583, 0.0529, 0.9738, 0.1783, 0.4879, 0.4079, 0.1074, + 0.5057, 0.9961, 0.1328, 0.5920, 0.7290, 0.7943, 0.2699, + 0.4245, 0.8340, 0.8310, 0.7824, 0.7435, 0.8129, 0.8814, + 0.7889, 0.8688, 0.4636, 0.6432, 0.6209, 0.5976, 0.7619, + 0.1123, 0.6496, 0.0741, 0.4224, 0.7444, 0.0204, 0.2397, + 0.8878, 0.9369, 0.8874, 0.3159, 0.4066, 0.7965, 0.9182, + 0.6430, 0.4446, 0.9224, 0.9817, 0.9823, 0.2288, 0.4574, + 0.8650, 0.3584, 
0.5672, 0.6737, 0.6909, 0.8267, 0.7004, + 0.1349, 0.9181, 0.4535, 0.2086, 0.7357, 0.4116, 0.8581, + 0.4745, 0.8694, 0.4770, 0.7691, 0.7362, 0.3193, 0.0221, + 0.8677, 0.6112, 0.7624, 0.0925, 0.5125, 0.8534, 0.7050, + 0.0262, 0.5351, 0.3163, 0.2383, 0.0599, 0.2394, 0.4205, + 0.6550, 0.0849, 0.3824, 0.5505, 0.5900, 0.6050, 0.9085, + 0.2972, 0.8380, 0.5688, 0.8007, 0.1354]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8800, 0.9246, 0.8175, ..., 0.7580, 0.5437, 0.3847]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.06183266639709473 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '169813', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.7862648963928223} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4929, 3000, 2082, 1973, 3068, 607, 2961, 29, 351, + 4460, 1744, 1352, 1928, 620, 2963, 2161, 3031, 1297, + 2919, 205, 4433, 3348, 1763, 856, 1768, 4451, 4553, + 4151, 4124, 2487, 3669, 4245, 3791, 4332, 4652, 2944, + 1288, 1040, 2819, 1114, 1794, 2584, 3750, 1803, 3463, + 4428, 74, 755, 2930, 4705, 1792, 4415, 3681, 827, + 4613, 2053, 1757, 3551, 4558, 4714, 3521, 1441, 4198, + 4541, 3322, 2233, 4821, 4668, 3073, 842, 2391, 3470, + 3549, 2287, 3488, 3373, 466, 1474, 153, 4112, 3825, + 4049, 3820, 3974, 3338, 3169, 805, 1709, 934, 888, + 4398, 4212, 3596, 4722, 3648, 2384, 3672, 1636, 2638, + 1043, 3299, 4127, 253, 202, 700, 2123, 4147, 1615, + 2757, 961, 2278, 1624, 3033, 3925, 2974, 659, 4026, + 4847, 3567, 1263, 2942, 649, 336, 2794, 2496, 1692, + 2922, 2720, 4718, 3696, 3170, 3469, 1190, 927, 2942, + 4571, 3583, 3648, 2986, 2168, 2398, 922, 12, 2532, + 4982, 381, 360, 3881, 4346, 1626, 2391, 1413, 4317, + 670, 2866, 246, 1603, 4269, 1839, 293, 829, 3204, + 2987, 1314, 2286, 432, 4021, 2567, 1874, 328, 649, + 3133, 542, 3317, 2128, 3678, 1459, 1800, 937, 707, + 3716, 2927, 4259, 1827, 3266, 2961, 3799, 3106, 2266, + 150, 2700, 2735, 4193, 1030, 278, 2845, 685, 2154, + 4023, 2287, 2456, 1418, 3324, 1219, 1823, 2013, 2290, + 618, 4034, 748, 3423, 2391, 1286, 2548, 2856, 3978, + 206, 3640, 4573, 4602, 2605, 3727, 1817, 3883, 289, + 1165, 667, 2695, 652, 3897, 749, 889, 941, 1767, + 2961, 4938, 4706, 2892, 918, 4326, 4938, 1016, 1946, + 3193, 4622, 2689, 1925, 1828, 3491, 4755]), + values=tensor([6.5732e-01, 5.5709e-01, 9.0255e-01, 3.7373e-01, + 9.2539e-01, 5.3507e-01, 6.8389e-01, 8.5026e-01, + 2.3478e-01, 1.5006e-01, 8.8977e-01, 6.9161e-01, + 6.1729e-01, 8.2125e-01, 3.7387e-01, 4.1891e-01, + 4.2314e-01, 6.0341e-01, 5.3184e-01, 6.7206e-01, + 7.4531e-02, 7.8553e-01, 8.1168e-01, 1.2840e-02, + 9.3074e-01, 9.6045e-01, 8.9283e-01, 3.7963e-01, + 7.0103e-01, 9.0509e-01, 2.9361e-01, 9.8464e-01, + 2.8780e-01, 4.8753e-01, 4.8920e-01, 3.3610e-01, + 9.1715e-01, 3.5090e-01, 5.7914e-02, 9.3110e-01, + 2.2612e-01, 4.1491e-01, 8.2882e-01, 5.9619e-01, + 1.4545e-01, 6.3253e-01, 6.1725e-01, 
7.4001e-01, + 9.8714e-01, 7.1669e-01, 9.6945e-01, 7.1615e-01, + 5.3071e-01, 1.9208e-01, 2.5701e-01, 6.2044e-01, + 6.5394e-01, 4.5949e-01, 5.3496e-01, 8.5279e-01, + 1.6171e-01, 4.7427e-01, 3.2489e-01, 9.4031e-01, + 6.6236e-01, 3.3448e-01, 4.5980e-01, 9.8944e-01, + 3.9491e-01, 4.9759e-01, 4.9597e-01, 6.3195e-01, + 2.6203e-01, 4.4820e-01, 5.1223e-01, 3.6293e-01, + 4.5785e-01, 2.8238e-01, 7.5282e-02, 3.5572e-02, + 1.0158e-01, 6.1843e-01, 2.0727e-01, 5.8810e-01, + 3.6032e-01, 6.3934e-01, 3.9975e-01, 9.0048e-01, + 6.8382e-01, 3.3572e-01, 5.8629e-02, 4.9842e-01, + 2.8358e-01, 3.0533e-01, 5.1674e-01, 5.7869e-01, + 8.9344e-01, 1.0014e-01, 1.0304e-01, 8.1526e-01, + 7.6755e-01, 7.0754e-02, 8.7246e-01, 7.6389e-01, + 6.2998e-01, 2.4960e-01, 3.2187e-01, 7.1579e-01, + 2.7927e-01, 5.3053e-01, 3.0237e-01, 7.6440e-02, + 4.1133e-01, 1.4339e-01, 4.0853e-01, 4.2458e-01, + 5.2413e-01, 1.0859e-03, 2.4440e-01, 2.9440e-02, + 5.4994e-01, 7.3144e-01, 9.1113e-01, 3.6059e-03, + 9.4994e-01, 3.3446e-01, 5.3742e-01, 4.4632e-01, + 7.2486e-02, 6.4910e-01, 1.3537e-01, 8.5198e-01, + 1.0295e-01, 9.4804e-01, 7.3070e-01, 6.7511e-01, + 9.8159e-01, 8.2450e-01, 9.4960e-03, 8.6690e-01, + 4.2671e-02, 1.4742e-01, 8.7106e-01, 3.5370e-01, + 2.7525e-01, 5.1878e-01, 4.3630e-01, 6.5541e-01, + 2.5515e-01, 4.3745e-01, 1.7148e-01, 1.7999e-01, + 9.8168e-02, 4.2671e-01, 8.0177e-01, 6.3035e-01, + 5.4076e-01, 7.7599e-01, 6.2263e-01, 2.3030e-01, + 6.9773e-01, 8.4732e-01, 8.0053e-01, 8.6019e-01, + 2.2649e-01, 6.7521e-01, 8.5825e-01, 6.0515e-01, + 9.8639e-01, 1.4857e-01, 2.9126e-01, 6.5170e-01, + 4.0089e-01, 1.9759e-01, 4.6747e-03, 6.9883e-02, + 3.7716e-01, 6.0957e-01, 3.6578e-01, 4.8538e-04, + 4.0192e-01, 4.0856e-01, 2.3977e-01, 8.9289e-01, + 4.4473e-01, 1.9347e-01, 4.3197e-01, 4.7259e-01, + 3.6158e-01, 6.2329e-01, 7.8778e-01, 2.0247e-01, + 5.4445e-02, 9.9327e-01, 1.4720e-01, 6.7916e-01, + 8.7100e-01, 3.3540e-01, 9.5084e-01, 3.4452e-02, + 2.6256e-01, 1.8338e-01, 9.7536e-01, 3.5124e-01, + 2.8707e-01, 7.8855e-01, 6.7111e-01, 5.7173e-01, + 9.5579e-01, 6.0574e-01, 6.8834e-01, 1.3845e-01, + 6.9447e-01, 7.9333e-02, 6.1603e-01, 6.4107e-03, + 3.1443e-02, 2.2338e-01, 7.6880e-01, 4.8996e-01, + 7.2451e-01, 2.5495e-01, 1.1564e-01, 6.2903e-01, + 6.6600e-01, 9.4852e-01, 4.0126e-01, 4.9942e-01, + 3.5796e-01, 8.0719e-01, 6.5464e-01, 2.6782e-01, + 9.4003e-01, 6.5438e-01, 3.6967e-01, 1.8464e-01, + 4.7524e-01, 7.2208e-01, 1.2031e-01, 5.8708e-01, + 2.0250e-01, 6.5919e-01, 4.4919e-01, 5.7088e-01, + 6.2858e-01, 1.8170e-01, 2.2030e-01, 3.1361e-01, + 3.8840e-01, 1.4761e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.8065, 0.5790, 0.9005, ..., 0.0135, 0.6788, 0.2076]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 3.7862648963928223 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '470922', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.104466915130615} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
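An aside on why these 1e-05 output files balloon like this: PyTorch only summarizes a tensor with "..." once its element count exceeds the print threshold, which defaults to 1000. The 250-nnz matrices here fall under that, so every index and value is dumped verbatim, while the denser runs above are elided. The threshold is adjustable:

import torch

torch.set_printoptions(threshold=1000)  # the default that produced these logs
print(torch.rand(250))    # under the threshold: printed in full, as above
print(torch.rand(2500))   # over the threshold: summarized with "..."
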
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 250, 250, 250]), + col_indices=tensor([ 286, 2098, 2957, 31, 4770, 3649, 4063, 3564, 3211, + 519, 372, 1653, 2583, 2464, 2987, 1744, 4556, 3480, + 4025, 965, 3221, 3480, 1237, 638, 2731, 2204, 3903, + 337, 4640, 3708, 4928, 846, 4069, 3241, 2342, 3155, + 3904, 4645, 1110, 3262, 206, 2707, 291, 1906, 3653, + 4410, 4931, 1727, 4173, 1383, 3385, 3838, 1305, 3168, + 1375, 4057, 2761, 2787, 2307, 6, 2503, 1872, 3680, + 2234, 1597, 3084, 1758, 491, 3779, 4890, 3184, 831, + 331, 2968, 3525, 1971, 454, 168, 2971, 2622, 1099, + 3321, 3822, 4888, 2660, 4331, 3839, 847, 453, 3854, + 958, 4865, 2336, 403, 4990, 684, 801, 4446, 671, + 4256, 4579, 3616, 522, 3560, 4436, 4875, 4839, 4252, + 2678, 3408, 277, 1706, 3353, 4272, 200, 4495, 1971, + 1057, 2080, 4776, 2636, 1840, 1457, 1455, 3267, 879, + 4146, 2502, 4940, 2313, 21, 1504, 535, 3781, 367, + 2250, 357, 4188, 146, 2230, 1761, 1304, 1785, 442, + 2853, 3699, 79, 4930, 2598, 3595, 2987, 205, 247, + 2873, 2237, 1134, 2086, 3420, 2896, 4246, 2080, 1618, + 978, 1465, 2116, 4506, 3634, 1205, 3062, 601, 2140, + 765, 3494, 3345, 738, 3535, 3354, 3147, 4390, 602, + 4817, 1923, 2074, 44, 1678, 4913, 1057, 4051, 3685, + 2781, 3899, 4448, 4692, 1277, 259, 2144, 2798, 4087, + 2596, 4771, 4479, 733, 3005, 1161, 3811, 3147, 4464, + 4683, 773, 3834, 3088, 1039, 3766, 2820, 3923, 3718, + 3049, 1976, 990, 3587, 2696, 4263, 2139, 3191, 1101, + 4701, 4465, 551, 3012, 2514, 2260, 1927, 3611, 4115, + 4664, 772, 3814, 2744, 2328, 560, 3629, 3666, 4110, + 1272, 515, 3230, 2775, 3191, 4516, 1702]), + values=tensor([0.4950, 0.4387, 0.7062, 0.8184, 0.9685, 0.9491, 0.6387, + 0.3930, 0.4627, 0.2264, 0.4673, 0.2803, 0.8352, 0.7116, + 0.3144, 0.9721, 0.1277, 0.9601, 0.0123, 0.3968, 0.9183, + 0.0517, 0.5676, 0.9009, 0.4901, 0.3378, 0.4750, 0.6307, + 0.7160, 0.7754, 0.8317, 0.5508, 0.6443, 0.1719, 0.1190, + 0.2292, 0.9505, 0.2302, 0.5965, 0.4343, 0.9706, 0.9472, + 0.7071, 0.4120, 0.5080, 0.6133, 0.5804, 0.7848, 0.1131, + 0.7398, 0.2113, 0.5136, 0.9362, 0.4868, 0.7307, 0.9542, + 0.1907, 0.7842, 0.0075, 0.1654, 0.1604, 0.5554, 0.9265, + 0.9594, 0.1847, 0.0412, 0.1458, 0.3185, 0.9474, 0.7262, + 0.9867, 0.9175, 0.8563, 0.0555, 0.5865, 0.1402, 0.0777, + 0.1693, 0.3284, 0.8041, 0.3119, 0.6054, 0.1208, 0.1474, + 0.6411, 0.6397, 0.9233, 0.0205, 0.1838, 0.9985, 0.4716, + 0.4977, 0.8331, 0.9916, 0.5989, 0.7640, 0.9210, 0.4278, + 0.0911, 0.8508, 0.2547, 0.5851, 0.9233, 0.2665, 0.1213, + 0.8754, 0.6206, 0.7311, 0.2194, 0.9834, 0.8122, 0.4946, + 0.7260, 0.9509, 0.7893, 0.0815, 0.9968, 0.5027, 0.3558, + 0.7001, 0.1542, 0.3964, 0.0402, 0.9298, 0.1070, 0.4902, + 0.8333, 0.6213, 0.7680, 0.5975, 0.2149, 0.9396, 0.8765, + 0.8836, 0.3422, 0.3496, 0.7499, 0.8855, 0.3598, 0.7125, + 0.1563, 0.2571, 0.2028, 0.2313, 0.3287, 0.3989, 0.4172, + 0.9776, 0.9673, 0.6099, 0.3489, 0.5171, 0.3263, 0.3550, + 0.8206, 0.1824, 0.1805, 0.0479, 0.6241, 0.3393, 0.7730, + 0.0623, 0.4418, 0.3306, 0.0692, 0.1691, 0.9139, 0.9289, + 0.1653, 0.5991, 0.0793, 0.6308, 0.8611, 0.1878, 0.5735, + 0.8923, 0.1845, 0.1387, 0.3446, 0.0333, 0.5909, 0.0051, + 0.6730, 0.2001, 0.7864, 0.3596, 0.6702, 0.7444, 0.5210, + 0.7057, 0.5369, 0.0193, 0.2647, 0.1729, 0.2634, 0.6010, + 0.4976, 0.7177, 0.7966, 0.8166, 0.9702, 0.2066, 0.9091, + 0.4739, 0.8346, 0.6718, 0.2794, 0.6249, 0.0434, 0.4190, + 0.9938, 0.9770, 0.8053, 0.5102, 0.4949, 0.5149, 0.3290, + 0.8346, 0.3511, 0.4625, 0.1176, 0.9732, 0.6568, 0.0814, + 0.1466, 0.9735, 0.9996, 0.5023, 0.0806, 
0.6393, 0.9851, + 0.9968, 0.7168, 0.8555, 0.4797, 0.5400, 0.6489, 0.3087, + 0.4955, 0.2041, 0.9406, 0.8471, 0.5173, 0.1622, 0.0921, + 0.5950, 0.5479, 0.1406, 0.5404, 0.7323]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4539, 0.8865, 0.6514, ..., 0.0864, 0.1789, 0.3670]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.104466915130615 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 250, 250, 250]), + col_indices=tensor([ 286, 2098, 2957, 31, 4770, 3649, 4063, 3564, 3211, + 519, 372, 1653, 2583, 2464, 2987, 1744, 4556, 3480, + 4025, 965, 3221, 3480, 1237, 638, 2731, 2204, 3903, + 337, 4640, 3708, 4928, 846, 4069, 3241, 2342, 3155, + 3904, 4645, 1110, 3262, 206, 2707, 291, 1906, 3653, + 4410, 4931, 1727, 4173, 1383, 3385, 3838, 1305, 3168, + 1375, 4057, 2761, 2787, 2307, 6, 2503, 1872, 3680, + 2234, 1597, 3084, 1758, 491, 3779, 4890, 3184, 831, + 331, 2968, 3525, 1971, 454, 168, 2971, 2622, 1099, + 3321, 3822, 4888, 2660, 4331, 3839, 847, 453, 3854, + 958, 4865, 2336, 403, 4990, 684, 801, 4446, 671, + 4256, 4579, 3616, 522, 3560, 4436, 4875, 4839, 4252, + 2678, 3408, 277, 1706, 3353, 4272, 200, 4495, 1971, + 1057, 2080, 4776, 2636, 1840, 1457, 1455, 3267, 879, + 4146, 2502, 4940, 2313, 21, 1504, 535, 3781, 367, + 2250, 357, 4188, 146, 2230, 1761, 1304, 1785, 442, + 2853, 3699, 79, 4930, 2598, 3595, 2987, 205, 247, + 2873, 2237, 1134, 2086, 3420, 2896, 4246, 2080, 1618, + 978, 1465, 2116, 4506, 3634, 1205, 3062, 601, 2140, + 765, 3494, 3345, 738, 3535, 3354, 3147, 4390, 602, + 4817, 1923, 2074, 44, 1678, 4913, 1057, 4051, 3685, + 2781, 3899, 4448, 4692, 1277, 259, 2144, 2798, 4087, + 2596, 4771, 4479, 733, 3005, 1161, 3811, 3147, 4464, + 4683, 773, 3834, 3088, 1039, 3766, 2820, 3923, 3718, + 3049, 1976, 990, 3587, 2696, 4263, 2139, 3191, 1101, + 4701, 4465, 551, 3012, 2514, 2260, 1927, 3611, 4115, + 4664, 772, 3814, 2744, 2328, 560, 3629, 3666, 4110, + 1272, 515, 3230, 2775, 3191, 4516, 1702]), + values=tensor([0.4950, 0.4387, 0.7062, 0.8184, 0.9685, 0.9491, 0.6387, + 0.3930, 0.4627, 0.2264, 0.4673, 0.2803, 0.8352, 0.7116, + 0.3144, 0.9721, 0.1277, 0.9601, 0.0123, 0.3968, 0.9183, + 0.0517, 0.5676, 0.9009, 0.4901, 0.3378, 0.4750, 0.6307, + 0.7160, 0.7754, 0.8317, 0.5508, 0.6443, 0.1719, 0.1190, + 0.2292, 0.9505, 0.2302, 0.5965, 0.4343, 0.9706, 0.9472, + 0.7071, 0.4120, 0.5080, 0.6133, 0.5804, 0.7848, 0.1131, + 0.7398, 0.2113, 0.5136, 0.9362, 0.4868, 0.7307, 0.9542, + 0.1907, 0.7842, 0.0075, 0.1654, 0.1604, 0.5554, 0.9265, + 0.9594, 0.1847, 0.0412, 0.1458, 0.3185, 0.9474, 0.7262, + 0.9867, 0.9175, 0.8563, 0.0555, 0.5865, 0.1402, 0.0777, + 0.1693, 0.3284, 0.8041, 0.3119, 0.6054, 0.1208, 0.1474, + 0.6411, 0.6397, 0.9233, 0.0205, 0.1838, 0.9985, 0.4716, + 0.4977, 0.8331, 0.9916, 0.5989, 0.7640, 0.9210, 0.4278, + 0.0911, 0.8508, 0.2547, 0.5851, 0.9233, 0.2665, 0.1213, + 0.8754, 0.6206, 0.7311, 0.2194, 0.9834, 0.8122, 0.4946, + 0.7260, 0.9509, 0.7893, 0.0815, 0.9968, 0.5027, 0.3558, + 0.7001, 0.1542, 0.3964, 0.0402, 0.9298, 0.1070, 0.4902, + 0.8333, 0.6213, 0.7680, 0.5975, 0.2149, 0.9396, 0.8765, + 0.8836, 0.3422, 
0.3496, 0.7499, 0.8855, 0.3598, 0.7125, + 0.1563, 0.2571, 0.2028, 0.2313, 0.3287, 0.3989, 0.4172, + 0.9776, 0.9673, 0.6099, 0.3489, 0.5171, 0.3263, 0.3550, + 0.8206, 0.1824, 0.1805, 0.0479, 0.6241, 0.3393, 0.7730, + 0.0623, 0.4418, 0.3306, 0.0692, 0.1691, 0.9139, 0.9289, + 0.1653, 0.5991, 0.0793, 0.6308, 0.8611, 0.1878, 0.5735, + 0.8923, 0.1845, 0.1387, 0.3446, 0.0333, 0.5909, 0.0051, + 0.6730, 0.2001, 0.7864, 0.3596, 0.6702, 0.7444, 0.5210, + 0.7057, 0.5369, 0.0193, 0.2647, 0.1729, 0.2634, 0.6010, + 0.4976, 0.7177, 0.7966, 0.8166, 0.9702, 0.2066, 0.9091, + 0.4739, 0.8346, 0.6718, 0.2794, 0.6249, 0.0434, 0.4190, + 0.9938, 0.9770, 0.8053, 0.5102, 0.4949, 0.5149, 0.3290, + 0.8346, 0.3511, 0.4625, 0.1176, 0.9732, 0.6568, 0.0814, + 0.1466, 0.9735, 0.9996, 0.5023, 0.0806, 0.6393, 0.9851, + 0.9968, 0.7168, 0.8555, 0.4797, 0.5400, 0.6489, 0.3087, + 0.4955, 0.2041, 0.9406, 0.8471, 0.5173, 0.1622, 0.0921, + 0.5950, 0.5479, 0.1406, 0.5404, 0.7323]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4539, 0.8865, 0.6514, ..., 0.0864, 0.1789, 0.3670]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.104466915130615 seconds + +[39.77, 39.06, 39.0, 43.22, 38.95, 38.87, 39.0, 38.96, 40.09, 38.55] +[92.79] +12.344655752182007 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 470922, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.104466915130615, 'TIME_S_1KI': 0.021456773977708867, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1145.4606072449685, 'W': 92.79} +[39.77, 39.06, 39.0, 43.22, 38.95, 38.87, 39.0, 38.96, 40.09, 38.55, 44.05, 41.96, 39.49, 38.5, 38.79, 39.12, 39.88, 38.31, 39.38, 38.55] +713.04 +35.652 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 470922, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.104466915130615, 'TIME_S_1KI': 0.021456773977708867, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1145.4606072449685, 'W': 92.79, 'J_1KI': 2.4323786258551703, 'W_1KI': 0.19703900008918676, 'W_D': 57.138000000000005, 'J_D': 705.3489403681756, 'W_D_1KI': 0.12133219514059655, 'J_D_1KI': 0.0002576481777037313} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json index 1cb7a6a..648e77d 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 33560, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.490610837936401, "TIME_S_1KI": 0.3125926948133612, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1237.3838691329956, "W": 88.17, "J_1KI": 36.87079467023229, "W_1KI": 2.6272348033373065, "W_D": 71.61225, "J_D": 1005.0112621335984, "W_D_1KI": 2.133857270560191, "J_D_1KI": 0.06358335132777686} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 33926, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], 
"MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.632460117340088, "TIME_S_1KI": 0.31340152441608465, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1251.6098043680192, "W": 88.42000000000002, "J_1KI": 36.892348180393185, "W_1KI": 2.606260685020339, "W_D": 71.92675000000001, "J_D": 1018.1432424375416, "W_D_1KI": 2.120106997582975, "J_D_1KI": 0.062492100382685115} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output index 14d045d..aff18b0 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,34 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3128688335418701} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.309490442276001} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 17, 23, ..., 999976, - 999990, 1000000]), - col_indices=tensor([ 283, 794, 12077, ..., 88041, 96002, 98956]), - values=tensor([0.6667, 0.7061, 0.4936, ..., 0.0020, 0.2226, 0.8107]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3746, 0.1787, 0.3326, ..., 0.2981, 0.5262, 0.4171]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 0.3128688335418701 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33560', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.490610837936401} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 24, ..., 999975, +tensor(crow_indices=tensor([ 0, 8, 15, ..., 999979, 999989, 1000000]), - col_indices=tensor([ 291, 3246, 3703, ..., 78390, 83116, 86469]), - values=tensor([0.7026, 0.5046, 0.5818, ..., 0.3671, 0.4061, 0.2873]), + col_indices=tensor([ 8594, 29009, 41843, ..., 77886, 78317, 95347]), + values=tensor([0.9328, 0.5746, 0.1196, ..., 0.5058, 0.9583, 0.4434]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3720, 0.0968, 0.4099, ..., 0.6733, 0.7032, 0.3728]) +tensor([0.8206, 0.6612, 0.6620, ..., 0.9270, 0.4872, 0.3406]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.490610837936401 seconds +Time: 0.309490442276001 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33926', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.632460117340088} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 24, ..., 999975, - 999989, 1000000]), - col_indices=tensor([ 291, 3246, 3703, ..., 78390, 83116, 86469]), - values=tensor([0.7026, 0.5046, 0.5818, ..., 0.3671, 0.4061, 0.2873]), +tensor(crow_indices=tensor([ 0, 7, 18, ..., 999986, + 999991, 1000000]), + col_indices=tensor([ 9555, 32072, 52846, ..., 78086, 80072, 96075]), + values=tensor([0.9751, 0.3269, 0.5720, ..., 0.0320, 0.6071, 0.6982]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3720, 0.0968, 0.4099, ..., 0.6733, 0.7032, 0.3728]) +tensor([0.5445, 0.0121, 0.5604, ..., 0.3280, 0.5430, 0.6322]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +36,30 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.490610837936401 seconds +Time: 10.632460117340088 seconds -[18.34, 17.84, 18.15, 18.01, 17.99, 21.31, 18.71, 18.25, 18.1, 17.94] -[88.17] -14.034069061279297 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33560, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.490610837936401, 'TIME_S_1KI': 0.3125926948133612, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1237.3838691329956, 'W': 88.17} -[18.34, 17.84, 18.15, 18.01, 17.99, 21.31, 18.71, 18.25, 18.1, 17.94, 18.37, 20.96, 17.82, 18.03, 18.16, 17.77, 17.89, 17.87, 17.71, 18.52] -331.155 -16.55775 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33560, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.490610837936401, 'TIME_S_1KI': 0.3125926948133612, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1237.3838691329956, 'W': 88.17, 'J_1KI': 36.87079467023229, 
'W_1KI': 2.6272348033373065, 'W_D': 71.61225, 'J_D': 1005.0112621335984, 'W_D_1KI': 2.133857270560191, 'J_D_1KI': 0.06358335132777686} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 999986, + 999991, 1000000]), + col_indices=tensor([ 9555, 32072, 52846, ..., 78086, 80072, 96075]), + values=tensor([0.9751, 0.3269, 0.5720, ..., 0.0320, 0.6071, 0.6982]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.5445, 0.0121, 0.5604, ..., 0.3280, 0.5430, 0.6322]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.632460117340088 seconds + +[18.53, 17.97, 18.07, 18.07, 17.99, 18.09, 21.33, 17.98, 18.39, 17.84] +[88.42] +14.155279397964478 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33926, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.632460117340088, 'TIME_S_1KI': 0.31340152441608465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1251.6098043680192, 'W': 88.42000000000002} +[18.53, 17.97, 18.07, 18.07, 17.99, 18.09, 21.33, 17.98, 18.39, 17.84, 18.71, 17.91, 17.99, 18.02, 18.68, 18.08, 17.92, 18.59, 18.24, 18.01] +329.865 +16.49325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33926, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.632460117340088, 'TIME_S_1KI': 0.31340152441608465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1251.6098043680192, 'W': 88.42000000000002, 'J_1KI': 36.892348180393185, 'W_1KI': 2.606260685020339, 'W_D': 71.92675000000001, 'J_D': 1018.1432424375416, 'W_D_1KI': 2.120106997582975, 'J_D_1KI': 0.062492100382685115} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..b1b6585 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2890, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.78333592414856, "TIME_S_1KI": 3.731258105241716, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1463.5743112421035, "W": 81.86, "J_1KI": 506.4270973156068, "W_1KI": 28.325259515570934, "W_D": 65.62225000000001, "J_D": 1173.259703712523, "W_D_1KI": 22.706660899653983, "J_D_1KI": 7.856976089845669} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..1352df1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,65 @@ 
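Each run above opens with the argv list the harness echoes before launching; note that the Xeon runs are pinned with numactl --cpunodebind=0 --membind=0 inside the container, while the Epyc runs earlier in this batch are not. A sketch of the launch, with the command copied verbatim from the log below; how batch.py actually assembles it is outside this diff:

import subprocess

cmd = ["apptainer", "run", "pytorch-xeon_4216.sif",
       "numactl", "--cpunodebind=0", "--membind=0",
       "python3", "spmv.py", "synthetic", "csr", "1000",
       "-ss", "100000", "-sd", "0.001"]
print(cmd)                       # matches the list echoed at the top of each run
subprocess.run(cmd, check=True)  # requires apptainer and the .sif image
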
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '0.001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.6327288150787354}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 95, 182, ..., 9999803,
+ 9999900, 10000000]),
+ col_indices=tensor([ 1164, 1511, 2606, ..., 97059, 99366, 99637]),
+ values=tensor([0.1789, 0.4314, 0.0466, ..., 0.4339, 0.7049, 0.9540]),
+ size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.5756, 0.3189, 0.9065, ..., 0.6359, 0.4482, 0.1651])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([100000, 100000])
+Rows: 100000
+Size: 10000000000
+NNZ: 10000000
+Density: 0.001
+Time: 3.6327288150787354 seconds
+
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2890', '-ss', '100000', '-sd', '0.001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.78333592414856}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+ matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([ 0, 118, 207, ..., 9999808,
+ 9999910, 10000000]),
+ col_indices=tensor([ 712, 968, 1059, ..., 96997, 98856, 99104]),
+ values=tensor([0.5177, 0.6712, 0.5343, ..., 0.8226, 0.3425, 0.6939]),
+ size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.8858, 0.8376, 0.8837, ..., 0.6861, 0.2657, 0.8920])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([100000, 100000])
+Rows: 100000
+Size: 10000000000
+NNZ: 10000000
+Density: 0.001
+Time: 10.78333592414856 seconds
+
+[18.4, 18.1, 18.05, 18.1, 18.29, 18.11, 18.1, 17.9, 17.96, 17.98]
+[81.86]
+17.878992319107056
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2890, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.78333592414856, 'TIME_S_1KI': 3.731258105241716, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1463.5743112421035, 'W': 81.86}
+[18.4, 18.1, 18.05, 18.1, 18.29, 18.11, 18.1, 17.9, 17.96, 17.98, 18.26, 17.88, 17.92, 17.96, 18.1, 17.9, 17.98, 18.14, 18.06, 17.77]
+324.755
+16.23775
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2890, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.78333592414856, 'TIME_S_1KI': 3.731258105241716, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1463.5743112421035, 'W': 81.86, 'J_1KI': 506.4270973156068, 'W_1KI': 28.325259515570934, 'W_D': 65.62225000000001, 'J_D': 1173.259703712523, 'W_D_1KI': 22.706660899653983, 'J_D_1KI': 7.856976089845669}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json
index 0f4c2a6..c182694 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 65588, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.838059663772583, "TIME_S_1KI": 0.16524455180479025, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1179.1915578985215, "W": 82.78, "J_1KI": 17.97876986489177, "W_1KI": 1.2621211197170215, "W_D": 66.50475, "J_D": 947.3524977065921, "W_D_1KI": 1.0139774044032446, "J_D_1KI": 0.015459800640410512}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 64311, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.418502807617188, "TIME_S_1KI": 0.16200187849072767, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1158.3267517518998, "W": 82.98, "J_1KI": 18.011331681234932, "W_1KI": 1.2902924849559174, "W_D": 66.6565, "J_D": 930.4652582327127, "W_D_1KI": 1.036471210212872, "J_D_1KI": 0.01611654631731542}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output
index 7b64a62..055aab1 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '100000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.17682647705078125}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.17745423316955566}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 5, 6, ..., 99999, 99999,
+tensor(crow_indices=tensor([ 0, 0, 3, ..., 99998, 100000,
 100000]),
- col_indices=tensor([ 3198, 22722, 88522, ..., 47695, 53177, 56584]),
- values=tensor([0.0931, 0.9110, 0.9063, ..., 0.1473, 0.7899, 0.0419]),
+ col_indices=tensor([42546, 58983, 86183, ..., 98460, 14991, 73616]),
+ values=tensor([0.4174, 0.2060, 0.0899, ..., 0.6212, 0.4971, 0.7481]),
 size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.4850, 0.3145, 0.7013, ..., 0.1298, 0.2149, 0.6470])
+tensor([0.8074, 0.4851, 0.0283, ..., 0.2070, 0.7576, 0.4733])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -16,19 +16,19 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 0.17682647705078125 seconds
+Time: 0.17745423316955566 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '59380', '-ss', '100000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.506051540374756}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '59170', '-ss', '100000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.660528182983398}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 0, 2, ..., 99999, 99999,
+tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000,
 100000]),
- col_indices=tensor([45126, 76716, 27115, ..., 82599, 76675, 53817]),
- values=tensor([0.5870, 0.5895, 0.9992, ..., 0.5279, 0.4372, 0.6677]),
+ col_indices=tensor([96712, 9860, 17593, ..., 59712, 70511, 99970]),
+ values=tensor([0.7958, 0.9740, 0.0109, ..., 0.7243, 0.7214, 0.8821]),
 size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.8372, 0.3480, 0.3478, ..., 0.9164, 0.0517, 0.0932])
+tensor([0.4741, 0.0741, 0.4151, ..., 0.2722, 0.2577, 0.9729])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -36,19 +36,19 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 9.506051540374756 seconds
+Time: 9.660528182983398 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '65588', '-ss', '100000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.838059663772583}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '64311', '-ss', '100000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.418502807617188}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 0, 3, ..., 100000, 100000,
+tensor(crow_indices=tensor([ 0, 2, 3, ..., 99999, 99999,
 100000]),
- col_indices=tensor([69179, 69629, 89362, ..., 28216, 37414, 39020]),
- values=tensor([0.6325, 0.8110, 0.8083, ..., 0.4927, 0.7217, 0.7562]),
+ col_indices=tensor([60832, 83948, 658, ..., 83631, 80017, 34658]),
+ values=tensor([0.5224, 0.7895, 0.2144, ..., 0.4897, 0.2214, 0.9534]),
 size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.6752, 0.8314, 0.5534, ..., 0.1964, 0.0025, 0.5959])
+tensor([0.6371, 0.8407, 0.9472, ..., 0.9476, 0.5347, 0.4303])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -56,16 +56,16 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 10.838059663772583 seconds
+Time: 10.418502807617188 seconds

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 0, 3, ..., 100000, 100000,
+tensor(crow_indices=tensor([ 0, 2, 3, ..., 99999, 99999,
 100000]),
- col_indices=tensor([69179, 69629, 89362, ..., 28216, 37414, 39020]),
- values=tensor([0.6325, 0.8110, 0.8083, ..., 0.4927, 0.7217, 0.7562]),
+ col_indices=tensor([60832, 83948, 658, ..., 83631, 80017, 34658]),
+ values=tensor([0.5224, 0.7895, 0.2144, ..., 0.4897, 0.2214, 0.9534]),
 size=(100000, 100000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.6752, 0.8314, 0.5534, ..., 0.1964, 0.0025, 0.5959])
+tensor([0.6371, 0.8407, 0.9472, ..., 0.9476, 0.5347, 0.4303])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([100000, 100000])
@@ -73,13 +73,13 @@ Rows: 100000
 Size: 10000000000
 NNZ: 100000
 Density: 1e-05
-Time: 10.838059663772583 seconds
+Time: 10.418502807617188 seconds

-[18.44, 17.92, 18.39, 18.06, 17.93, 17.86, 18.18, 18.13, 18.19, 17.89]
-[82.78]
-14.244884729385376
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 65588, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.838059663772583, 'TIME_S_1KI': 0.16524455180479025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1179.1915578985215, 'W': 82.78}
-[18.44, 17.92, 18.39, 18.06, 17.93, 17.86, 18.18, 18.13, 18.19, 17.89, 18.4, 17.89, 17.79, 17.88, 18.16, 18.32, 18.28, 17.67, 18.23, 18.52]
-325.505
-16.27525
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 65588, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.838059663772583, 'TIME_S_1KI': 0.16524455180479025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1179.1915578985215, 'W': 82.78, 'J_1KI': 17.97876986489177, 'W_1KI': 1.2621211197170215, 'W_D': 66.50475, 'J_D': 947.3524977065921, 'W_D_1KI': 1.0139774044032446, 'J_D_1KI': 0.015459800640410512}
+[18.9, 18.01, 18.86, 18.3, 17.96, 18.02, 18.19, 17.91, 18.92, 17.86]
+[82.98]
+13.959107637405396
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64311, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.418502807617188, 'TIME_S_1KI': 0.16200187849072767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1158.3267517518998, 'W': 82.98}
+[18.9, 18.01, 18.86, 18.3, 17.96, 18.02, 18.19, 17.91, 18.92, 17.86, 18.32, 17.96, 18.01, 17.83, 18.19, 17.85, 17.88, 18.01, 18.1, 17.86]
+326.47
+16.323500000000003
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64311, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.418502807617188, 'TIME_S_1KI': 0.16200187849072767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1158.3267517518998, 'W': 82.98, 'J_1KI': 18.011331681234932, 'W_1KI': 1.2902924849559174, 'W_D': 66.6565, 'J_D': 930.4652582327127, 'W_D_1KI': 1.036471210212872, 'J_D_1KI': 0.01611654631731542}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json
index ff48609..c677488 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 240931, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.228987216949463, "TIME_S_1KI": 0.04245608583764423, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 972.0923814868927, "W": 74.28, "J_1KI": 4.034733519085932, "W_1KI": 0.30830403725547983, "W_D": 58.167500000000004, "J_D": 761.2302584832908, "W_D_1KI": 0.2414280437137604, "J_D_1KI": 0.0010020630127038877}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 253635, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.510948419570923, "TIME_S_1KI": 0.04144123807664921, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1049.5495847654342, "W": 74.66, "J_1KI": 4.1380313630430905, "W_1KI": 0.29436000551974295, "W_D": 58.32449999999999, "J_D": 819.9096538528203, "W_D_1KI": 0.22995446212076406, "J_D_1KI": 0.0009066353702003433}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output
index 36a267f..d7cd9dc 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output
@@ -1,13 +1,13 @@
 ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.0001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.062392234802246094}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.057019948959350586}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 2, 3, ..., 9997, 9999, 10000]),
- col_indices=tensor([7179, 9532, 8081, ..., 4031, 8581, 2872]),
- values=tensor([0.3998, 0.4929, 0.1773, ..., 0.2243, 0.6349, 0.5923]),
+tensor(crow_indices=tensor([ 0, 3, 5, ..., 9999, 9999, 10000]),
+ col_indices=tensor([5511, 5632, 9392, ..., 1424, 5807, 9708]),
+ values=tensor([0.8862, 0.8794, 0.5579, ..., 0.8535, 0.8536, 0.3017]),
 size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.2436, 0.6971, 0.0487, ..., 0.2986, 0.9140, 0.9941])
+tensor([0.8843, 0.1620, 0.1106, ..., 0.3314, 0.8529, 0.5084])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -15,18 +15,18 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 0.062392234802246094 seconds
+Time: 0.057019948959350586 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '168290', '-ss', '10000', '-sd', '0.0001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.3342225551605225}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '184146', '-ss', '10000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.623284816741943}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 1, 2, ..., 10000, 10000, 10000]),
- col_indices=tensor([7117, 7845, 2903, ..., 807, 7859, 5458]),
- values=tensor([0.8544, 0.9061, 0.0037, ..., 0.6594, 0.1915, 0.6916]),
+tensor(crow_indices=tensor([ 0, 2, 3, ..., 10000, 10000, 10000]),
+ col_indices=tensor([5228, 7612, 8334, ..., 8947, 2750, 8241]),
+ values=tensor([0.5331, 0.8440, 0.9594, ..., 0.6439, 0.5967, 0.7449]),
 size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.0592, 0.4192, 0.0774, ..., 0.7897, 0.5835, 0.6060])
+tensor([0.9017, 0.2905, 0.1618, ..., 0.3745, 0.4560, 0.4176])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -34,18 +34,18 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 7.3342225551605225 seconds
+Time: 7.623284816741943 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '240931', '-ss', '10000', '-sd', '0.0001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.228987216949463}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '253635', '-ss', '10000', '-sd', '0.0001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.510948419570923}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 1, 4, ..., 9999, 10000, 10000]),
- col_indices=tensor([1962, 399, 6914, ..., 7707, 7379, 8204]),
- values=tensor([0.6427, 0.2940, 0.2788, ..., 0.7421, 0.9158, 0.7396]),
+tensor(crow_indices=tensor([ 0, 2, 2, ..., 9997, 9999, 10000]),
+ col_indices=tensor([ 773, 7277, 5799, ..., 6666, 7394, 1954]),
+ values=tensor([0.1024, 0.0437, 0.8987, ..., 0.7237, 0.2930, 0.3597]),
 size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.3387, 0.7040, 0.3501, ..., 0.4098, 0.3396, 0.7875])
+tensor([0.1275, 0.0118, 0.1480, ..., 0.4560, 0.1036, 0.8618])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -53,15 +53,15 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 10.228987216949463 seconds
+Time: 10.510948419570923 seconds

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 1, 4, ..., 9999, 10000, 10000]),
- col_indices=tensor([1962, 399, 6914, ..., 7707, 7379, 8204]),
- values=tensor([0.6427, 0.2940, 0.2788, ..., 0.7421, 0.9158, 0.7396]),
+tensor(crow_indices=tensor([ 0, 2, 2, ..., 9997, 9999, 10000]),
+ col_indices=tensor([ 773, 7277, 5799, ..., 6666, 7394, 1954]),
+ values=tensor([0.1024, 0.0437, 0.8987, ..., 0.7237, 0.2930, 0.3597]),
 size=(10000, 10000), nnz=10000, layout=torch.sparse_csr)
-tensor([0.3387, 0.7040, 0.3501, ..., 0.4098, 0.3396, 0.7875])
+tensor([0.1275, 0.0118, 0.1480, ..., 0.4560, 0.1036, 0.8618])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -69,13 +69,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 10000
 Density: 0.0001
-Time: 10.228987216949463 seconds
+Time: 10.510948419570923 seconds

-[18.32, 17.9, 17.75, 17.66, 17.81, 17.98, 17.81, 17.63, 17.93, 18.05]
-[74.28]
-13.086865663528442
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 240931, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.228987216949463, 'TIME_S_1KI': 0.04245608583764423, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 972.0923814868927, 'W': 74.28}
-[18.32, 17.9, 17.75, 17.66, 17.81, 17.98, 17.81, 17.63, 17.93, 18.05, 18.35, 17.85, 17.96, 17.83, 18.33, 17.85, 17.85, 17.82, 18.0, 17.86]
-322.25
-16.1125
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 240931, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.228987216949463, 'TIME_S_1KI': 0.04245608583764423, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 972.0923814868927, 'W': 74.28, 'J_1KI': 4.034733519085932, 'W_1KI': 0.30830403725547983, 'W_D': 58.167500000000004, 'J_D': 761.2302584832908, 'W_D_1KI': 0.2414280437137604, 'J_D_1KI': 0.0010020630127038877}
+[18.3, 17.87, 18.03, 17.87, 18.05, 17.86, 19.1, 17.97, 18.04, 17.74]
+[74.66]
+14.057722806930542
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253635, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.510948419570923, 'TIME_S_1KI': 0.04144123807664921, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.5495847654342, 'W': 74.66}
+[18.3, 17.87, 18.03, 17.87, 18.05, 17.86, 19.1, 17.97, 18.04, 17.74, 18.05, 17.87, 18.1, 17.95, 17.96, 18.0, 19.85, 17.84, 18.3, 18.01]
+326.71000000000004
+16.335500000000003
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253635, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.510948419570923, 'TIME_S_1KI': 0.04144123807664921, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.5495847654342, 'W': 74.66, 'J_1KI': 4.1380313630430905, 'W_1KI': 0.29436000551974295, 'W_D': 58.32449999999999, 'J_D': 819.9096538528203, 'W_D_1KI': 0.22995446212076406, 'J_D_1KI': 0.0009066353702003433}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json
index 4c2a534..8f37619 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 201421, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.7230703830719, "TIME_S_1KI": 0.053237102303493176, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1117.283116903305, "W": 79.97, "J_1KI": 5.547004120242204, "W_1KI": 0.3970291081863361, "W_D": 63.60725, "J_D": 888.6745846898556, "W_D_1KI": 0.31579254397505724, "J_D_1KI": 0.001567823335079546}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 197679, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.670233726501465, "TIME_S_1KI": 0.053977578430189674, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1114.4275546693802, "W": 79.73, "J_1KI": 5.637561676603889, "W_1KI": 0.40333065221900155, "W_D": 63.12950000000001, "J_D": 882.3937578389646, "W_D_1KI": 0.31935359851071693, "J_D_1KI": 0.001615516056387967}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output
index 1cf380c..4a4c171 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06886577606201172}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06928658485412598}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 12, 19, ..., 99984, 99991,
+tensor(crow_indices=tensor([ 0, 8, 25, ..., 99976, 99987,
 100000]),
- col_indices=tensor([1627, 2251, 2667, ..., 7083, 9414, 9995]),
- values=tensor([0.7763, 0.8562, 0.0227, ..., 0.7081, 0.0734, 0.4206]),
+ col_indices=tensor([ 333, 360, 7030, ..., 7825, 8274, 9549]),
+ values=tensor([0.8393, 0.7372, 0.2908, ..., 0.1152, 0.3448, 0.5520]),
 size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.6749, 0.4550, 0.5239, ..., 0.7938, 0.7493, 0.7052])
+tensor([0.6596, 0.1551, 0.2351, ..., 0.2147, 0.9669, 0.0099])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -16,19 +16,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 0.06886577606201172 seconds
+Time: 0.06928658485412598 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '152470', '-ss', '10000', '-sd', '0.001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.948191404342651}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '151544', '-ss', '10000', '-sd', '0.001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.049443006515503}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 14, 22, ..., 99977, 99992,
+tensor(crow_indices=tensor([ 0, 10, 20, ..., 99982, 99989,
 100000]),
- col_indices=tensor([ 579, 1179, 1463, ..., 6326, 6539, 6627]),
- values=tensor([0.4661, 0.6191, 0.1376, ..., 0.4152, 0.1640, 0.4813]),
+ col_indices=tensor([ 534, 848, 1028, ..., 7528, 7587, 7919]),
+ values=tensor([0.8744, 0.7231, 0.5055, ..., 0.6485, 0.2326, 0.7897]),
 size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.0160, 0.8279, 0.2510, ..., 0.4302, 0.2870, 0.5452])
+tensor([0.6730, 0.3279, 0.8164, ..., 0.2443, 0.5036, 0.1429])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -36,19 +36,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 7.948191404342651 seconds
+Time: 8.049443006515503 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '201421', '-ss', '10000', '-sd', '0.001']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.7230703830719}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '197679', '-ss', '10000', '-sd', '0.001']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.670233726501465}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 11, 17, ..., 99977, 99988,
+tensor(crow_indices=tensor([ 0, 11, 18, ..., 99979, 99990,
 100000]),
- col_indices=tensor([ 243, 1001, 2007, ..., 7428, 8081, 8733]),
- values=tensor([0.5597, 0.5588, 0.7631, ..., 0.2707, 0.4657, 0.9680]),
+ col_indices=tensor([ 654, 920, 2120, ..., 5173, 5860, 7868]),
+ values=tensor([0.9786, 0.8942, 0.8907, ..., 0.0590, 0.7963, 0.5333]),
 size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.1756, 0.9887, 0.2623, ..., 0.3846, 0.9664, 0.0716])
+tensor([0.8342, 0.1347, 0.2067, ..., 0.1241, 0.4408, 0.8118])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -56,16 +56,16 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 10.7230703830719 seconds
+Time: 10.670233726501465 seconds

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 11, 17, ..., 99977, 99988,
+tensor(crow_indices=tensor([ 0, 11, 18, ..., 99979, 99990,
 100000]),
- col_indices=tensor([ 243, 1001, 2007, ..., 7428, 8081, 8733]),
- values=tensor([0.5597, 0.5588, 0.7631, ..., 0.2707, 0.4657, 0.9680]),
+ col_indices=tensor([ 654, 920, 2120, ..., 5173, 5860, 7868]),
+ values=tensor([0.9786, 0.8942, 0.8907, ..., 0.0590, 0.7963, 0.5333]),
 size=(10000, 10000), nnz=100000, layout=torch.sparse_csr)
-tensor([0.1756, 0.9887, 0.2623, ..., 0.3846, 0.9664, 0.0716])
+tensor([0.8342, 0.1347, 0.2067, ..., 0.1241, 0.4408, 0.8118])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -73,13 +73,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 100000
 Density: 0.001
-Time: 10.7230703830719 seconds
+Time: 10.670233726501465 seconds

-[20.0, 17.95, 18.01, 18.57, 18.05, 17.91, 18.47, 18.3, 18.35, 18.45]
-[79.97]
-13.971278190612793
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 201421, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.7230703830719, 'TIME_S_1KI': 0.053237102303493176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1117.283116903305, 'W': 79.97}
-[20.0, 17.95, 18.01, 18.57, 18.05, 17.91, 18.47, 18.3, 18.35, 18.45, 18.1, 18.11, 18.32, 17.8, 18.31, 17.97, 17.94, 17.86, 17.95, 18.22]
-327.255
-16.36275
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 201421, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.7230703830719, 'TIME_S_1KI': 0.053237102303493176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1117.283116903305, 'W': 79.97, 'J_1KI': 5.547004120242204, 'W_1KI': 0.3970291081863361, 'W_D': 63.60725, 'J_D': 888.6745846898556, 'W_D_1KI': 0.31579254397505724, 'J_D_1KI': 0.001567823335079546}
+[18.34, 17.98, 18.08, 17.94, 18.13, 18.09, 21.2, 18.15, 17.85, 18.53]
+[79.73]
+13.977518558502197
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 197679, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.670233726501465, 'TIME_S_1KI': 0.053977578430189674, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1114.4275546693802, 'W': 79.73}
+[18.34, 17.98, 18.08, 17.94, 18.13, 18.09, 21.2, 18.15, 17.85, 18.53, 18.16, 18.1, 18.61, 18.87, 18.17, 17.77, 20.46, 17.84, 18.3, 17.91]
+332.01
+16.6005
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 197679, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.670233726501465, 'TIME_S_1KI': 0.053977578430189674, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1114.4275546693802, 'W': 79.73, 'J_1KI': 5.637561676603889, 'W_1KI': 0.40333065221900155, 'W_D': 63.12950000000001, 'J_D': 882.3937578389646, 'W_D_1KI': 0.31935359851071693, 'J_D_1KI': 0.001615516056387967}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json
index 7e50353..23a3327 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 58758, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.521214962005615, "TIME_S_1KI": 0.1790601273359477, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1248.3679735660553, "W": 87.25, "J_1KI": 21.245923509412425, "W_1KI": 1.484904183260152, "W_D": 70.41275, "J_D": 1007.4615705525875, "W_D_1KI": 1.1983517138091835, "J_D_1KI": 0.020394698829251906}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 58160, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.467525959014893, "TIME_S_1KI": 0.17997809420589567, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1227.377723479271, "W": 87.15, "J_1KI": 21.10346842295858, "W_1KI": 1.4984525447042643, "W_D": 70.98275000000001, "J_D": 999.6861285289527, "W_D_1KI": 1.2204736932599725, "J_D_1KI": 0.020984760888238866}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output
index 560bbb8..35e5d90 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.01']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.19649839401245117}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.19839882850646973}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 90, 190, ..., 999800,
- 999902, 1000000]),
- col_indices=tensor([ 52, 87, 188, ..., 9706, 9893, 9952]),
- values=tensor([0.1675, 0.8959, 0.7675, ..., 0.1378, 0.1178, 0.3486]),
+tensor(crow_indices=tensor([ 0, 97, 186, ..., 999796,
+ 999897, 1000000]),
+ col_indices=tensor([ 169, 359, 528, ..., 9765, 9789, 9792]),
+ values=tensor([0.6521, 0.9085, 0.4727, ..., 0.8814, 0.1698, 0.8627]),
 size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.9304, 0.9814, 0.5110, ..., 0.0040, 0.2898, 0.8662])
+tensor([0.7127, 0.9881, 0.6892, ..., 0.7113, 0.3734, 0.9813])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -16,19 +16,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000000
 Density: 0.01
-Time: 0.19649839401245117 seconds
+Time: 0.19839882850646973 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53435', '-ss', '10000', '-sd', '0.01']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.548681497573853}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52923', '-ss', '10000', '-sd', '0.01']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.554424524307251}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 94, 197, ..., 999809,
- 999893, 1000000]),
- col_indices=tensor([ 61, 165, 222, ..., 9905, 9907, 9919]),
- values=tensor([0.6376, 0.5545, 0.9458, ..., 0.6333, 0.2848, 0.3343]),
+tensor(crow_indices=tensor([ 0, 112, 189, ..., 999798,
+ 999899, 1000000]),
+ col_indices=tensor([ 113, 156, 184, ..., 9769, 9838, 9941]),
+ values=tensor([0.0187, 0.7839, 0.6319, ..., 0.9818, 0.7594, 0.0765]),
 size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.2834, 0.7754, 0.6738, ..., 0.4578, 0.3713, 0.7996])
+tensor([0.4252, 0.8416, 0.9146, ..., 0.0970, 0.6595, 0.8304])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -36,19 +36,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000000
 Density: 0.01
-Time: 9.548681497573853 seconds
+Time: 9.554424524307251 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '58758', '-ss', '10000', '-sd', '0.01']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.521214962005615}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '58160', '-ss', '10000', '-sd', '0.01']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.467525959014893}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 109, 219, ..., 999802,
- 999904, 1000000]),
- col_indices=tensor([ 63, 137, 260, ..., 9828, 9873, 9905]),
- values=tensor([0.1449, 0.8321, 0.3255, ..., 0.3929, 0.1108, 0.3040]),
+tensor(crow_indices=tensor([ 0, 93, 191, ..., 999802,
+ 999899, 1000000]),
+ col_indices=tensor([ 46, 78, 103, ..., 9585, 9899, 9954]),
+ values=tensor([0.1947, 0.9409, 0.0413, ..., 0.0261, 0.0318, 0.5135]),
 size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.9048, 0.1055, 0.1608, ..., 0.3713, 0.7919, 0.0232])
+tensor([0.1045, 0.5937, 0.6366, ..., 0.8712, 0.6092, 0.3132])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -56,16 +56,16 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000000
 Density: 0.01
-Time: 10.521214962005615 seconds
+Time: 10.467525959014893 seconds

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 109, 219, ..., 999802,
- 999904, 1000000]),
- col_indices=tensor([ 63, 137, 260, ..., 9828, 9873, 9905]),
- values=tensor([0.1449, 0.8321, 0.3255, ..., 0.3929, 0.1108, 0.3040]),
+tensor(crow_indices=tensor([ 0, 93, 191, ..., 999802,
+ 999899, 1000000]),
+ col_indices=tensor([ 46, 78, 103, ..., 9585, 9899, 9954]),
+ values=tensor([0.1947, 0.9409, 0.0413, ..., 0.0261, 0.0318, 0.5135]),
 size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr)
-tensor([0.9048, 0.1055, 0.1608, ..., 0.3713, 0.7919, 0.0232])
+tensor([0.1045, 0.5937, 0.6366, ..., 0.8712, 0.6092, 0.3132])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -73,13 +73,13 @@ Rows: 10000
 Size: 100000000
 NNZ: 1000000
 Density: 0.01
-Time: 10.521214962005615 seconds
+Time: 10.467525959014893 seconds

-[21.83, 18.04, 18.3, 17.99, 18.07, 21.44, 19.02, 18.2, 18.32, 17.87]
-[87.25]
-14.307942390441895
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.521214962005615, 'TIME_S_1KI': 0.1790601273359477, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1248.3679735660553, 'W': 87.25}
-[21.83, 18.04, 18.3, 17.99, 18.07, 21.44, 19.02, 18.2, 18.32, 17.87, 18.33, 21.51, 17.89, 18.53, 18.27, 17.81, 18.21, 18.47, 18.07, 19.18]
-336.745
-16.83725
-{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.521214962005615, 'TIME_S_1KI': 0.1790601273359477, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1248.3679735660553, 'W': 87.25, 'J_1KI': 21.245923509412425, 'W_1KI': 1.484904183260152, 'W_D': 70.41275, 'J_D': 1007.4615705525875, 'W_D_1KI': 1.1983517138091835, 'J_D_1KI': 0.020394698829251906}
+[18.42, 18.1, 18.09, 17.94, 17.96, 18.13, 17.89, 17.87, 18.11, 18.12]
+[87.15]
+14.083508014678955
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58160, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.467525959014893, 'TIME_S_1KI': 0.17997809420589567, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1227.377723479271, 'W': 87.15}
+[18.42, 18.1, 18.09, 17.94, 17.96, 18.13, 17.89, 17.87, 18.11, 18.12, 18.39, 17.75, 17.9, 17.92, 17.94, 17.89, 17.89, 17.89, 17.75, 17.72]
+323.34499999999997
+16.16725
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58160, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.467525959014893, 'TIME_S_1KI': 0.17997809420589567, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1227.377723479271, 'W': 87.15, 'J_1KI': 21.10346842295858, 'W_1KI': 1.4984525447042643, 'W_D': 70.98275000000001, 'J_D': 999.6861285289527, 'W_D_1KI': 1.2204736932599725, 'J_D_1KI': 0.020984760888238866}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json
index c7df7b8..4e517f7 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8801, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.496035814285278, "TIME_S_1KI": 1.1925958202801135, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1351.109428257942, "W": 82.21, "J_1KI": 153.5177171069131, "W_1KI": 9.340983979093284, "W_D": 65.94874999999999, "J_D": 1083.858142644763, "W_D_1KI": 7.493324622202022, "J_D_1KI": 0.8514174096354984}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8810, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.41358470916748, "TIME_S_1KI": 1.18201869570573, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1329.5588334417344, "W": 83.19, "J_1KI": 150.91473705354534, "W_1KI": 9.442678774120317, "W_D": 66.8505, "J_D": 1068.4177520735263, "W_D_1KI": 7.588024971623155, "J_D_1KI": 0.8612968185724353}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output
index 7df5a22..8074157 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output
@@ -1,14 +1,14 @@
 ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.1929755210876465}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.1917307376861572}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 485, 983, ..., 4998996,
- 4999523, 5000000]),
- col_indices=tensor([ 11, 47, 113, ..., 9897, 9981, 9996]),
- values=tensor([0.8953, 0.8081, 0.2668, ..., 0.4279, 0.4927, 0.2076]),
+tensor(crow_indices=tensor([ 0, 493, 986, ..., 4999011,
+ 4999486, 5000000]),
+ col_indices=tensor([ 9, 19, 72, ..., 9981, 9987, 9993]),
+ values=tensor([0.5847, 0.5648, 0.9368, ..., 0.4963, 0.0551, 0.2254]),
 size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr)
-tensor([0.3301, 0.9128, 0.0218, ..., 0.3705, 0.4449, 0.9102])
+tensor([0.1357, 0.6996, 0.1280, ..., 0.8014, 0.9186, 0.9128])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -16,19 +16,19 @@ Rows: 10000
 Size: 100000000
 NNZ: 5000000
 Density: 0.05
-Time: 1.1929755210876465 seconds
+Time: 1.1917307376861572 seconds

-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8801', '-ss', '10000', '-sd', '0.05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.496035814285278}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8810', '-ss', '10000', '-sd', '0.05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.41358470916748}

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([ 0, 509, 1022, ..., 4998954,
- 4999475, 5000000]),
- col_indices=tensor([ 16, 27, 72, ..., 9970, 9971, 9996]),
- values=tensor([0.8982, 0.6195, 0.1567, ..., 0.8636, 0.4059, 0.3830]),
+tensor(crow_indices=tensor([ 0, 520, 1021, ..., 4999015,
+ 4999518, 5000000]),
+ col_indices=tensor([ 2, 21, 23, ..., 9856, 9947, 9960]),
+ values=tensor([0.9436, 0.1483, 0.1830, ..., 0.0068, 0.4770, 0.7006]),
 size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr)
-tensor([0.3042, 0.6883, 0.8193, ..., 0.9178, 0.9438, 0.4311])
+tensor([0.0991, 0.7135, 0.2277, ..., 0.9430, 0.0011, 0.3680])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -36,16 +36,16 @@ Rows: 10000
 Size: 100000000
 NNZ: 5000000
 Density: 0.05
-Time: 10.496035814285278 seconds
+Time: 10.41358470916748 seconds

 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 509, 1022, ..., 4998954, - 4999475, 5000000]), - col_indices=tensor([ 16, 27, 72, ..., 9970, 9971, 9996]), - values=tensor([0.8982, 0.6195, 0.1567, ..., 0.8636, 0.4059, 0.3830]), +tensor(crow_indices=tensor([ 0, 520, 1021, ..., 4999015, + 4999518, 5000000]), + col_indices=tensor([ 2, 21, 23, ..., 9856, 9947, 9960]), + values=tensor([0.9436, 0.1483, 0.1830, ..., 0.0068, 0.4770, 0.7006]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.3042, 0.6883, 0.8193, ..., 0.9178, 0.9438, 0.4311]) +tensor([0.0991, 0.7135, 0.2277, ..., 0.9430, 0.0011, 0.3680]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.496035814285278 seconds +Time: 10.41358470916748 seconds -[18.39, 18.45, 17.88, 18.05, 18.11, 18.03, 17.97, 17.99, 18.04, 18.19] -[82.21] -16.434854984283447 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.496035814285278, 'TIME_S_1KI': 1.1925958202801135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1351.109428257942, 'W': 82.21} -[18.39, 18.45, 17.88, 18.05, 18.11, 18.03, 17.97, 17.99, 18.04, 18.19, 18.32, 17.88, 17.95, 17.82, 18.37, 18.15, 18.04, 17.93, 18.22, 17.79] -325.225 -16.26125 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.496035814285278, 'TIME_S_1KI': 1.1925958202801135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1351.109428257942, 'W': 82.21, 'J_1KI': 153.5177171069131, 'W_1KI': 9.340983979093284, 'W_D': 65.94874999999999, 'J_D': 1083.858142644763, 'W_D_1KI': 7.493324622202022, 'J_D_1KI': 0.8514174096354984} +[18.42, 18.1, 18.34, 18.2, 17.91, 17.99, 18.73, 17.98, 18.05, 17.95] +[83.19] +15.982195377349854 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8810, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.41358470916748, 'TIME_S_1KI': 1.18201869570573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1329.5588334417344, 'W': 83.19} +[18.42, 18.1, 18.34, 18.2, 17.91, 17.99, 18.73, 17.98, 18.05, 17.95, 18.53, 18.25, 18.01, 18.36, 18.13, 18.03, 18.01, 18.04, 18.22, 17.98] +326.79 +16.3395 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8810, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.41358470916748, 'TIME_S_1KI': 1.18201869570573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1329.5588334417344, 'W': 83.19, 'J_1KI': 150.91473705354534, 'W_1KI': 9.442678774120317, 'W_D': 66.8505, 'J_D': 1068.4177520735263, 'W_D_1KI': 7.588024971623155, 'J_D_1KI': 0.8612968185724353} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..3f0bb83 --- /dev/null +++ 
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json
new file mode 100644
index 0000000..3f0bb83
--- /dev/null
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json
@@ -0,0 +1 @@
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2918, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.42000699043274, "TIME_S_1KI": 3.5709413949392523, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1397.7840516352655, "W": 79.43, "J_1KI": 479.0212651251767, "W_1KI": 27.220699108978753, "W_D": 63.01475000000001, "J_D": 1108.913666974485, "W_D_1KI": 21.595185058259084, "J_D_1KI": 7.400680280417781}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output
new file mode 100644
index 0000000..099bdad
--- /dev/null
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output
@@ -0,0 +1,65 @@
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '0.1']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 3.597771644592285}
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+  matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([       0,      977,     1956,  ..., 9997922,
+                             9998976, 10000000]),
+       col_indices=tensor([   2,    3,    9,  ..., 9970, 9977, 9979]),
+       values=tensor([0.1332, 0.2138, 0.7669,  ..., 0.0474, 0.1604, 0.1097]),
+       size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.2601, 0.5133, 0.4344,  ..., 0.1772, 0.3859, 0.7315])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([10000, 10000])
+Rows: 10000
+Size: 100000000
+NNZ: 10000000
+Density: 0.1
+Time: 3.597771644592285 seconds
+
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2918', '-ss', '10000', '-sd', '0.1']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.42000699043274}
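In every synthetic log, MATRIX_NNZ is exactly rows × cols × density (10000 × 10000 × 0.1 = 10,000,000 here; 1000 at density 1e-05 below), so the generator evidently draws a fixed number of random coordinates and values. spmv.py itself is not included in this diff; the following is a minimal sketch under that assumption:

```python
import torch

# Minimal sketch (assumed) of the synthetic-matrix setup behind
# "spmv.py synthetic csr 1000 -ss 10000 -sd 0.1".
rows = cols = 10000               # -ss
density = 0.1                     # -sd
nnz = int(rows * cols * density)  # 10_000_000, the MATRIX_NNZ in the logs

indices = torch.stack([torch.randint(0, rows, (nnz,)),
                       torch.randint(0, cols, (nnz,))])
values = torch.rand(nnz)
matrix = torch.sparse_coo_tensor(indices, values, (rows, cols)).coalesce()
# coalesce() merges duplicate coordinates, so this sketch can fall a few
# entries short of nnz; the real generator must hit the count exactly.
matrix = matrix.to_sparse_csr().type(torch.float32)  # spmv.py line 75
```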
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+  matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([       0,     1029,     2018,  ..., 9998096,
+                             9999045, 10000000]),
+       col_indices=tensor([   7,   14,   18,  ..., 9941, 9949, 9980]),
+       values=tensor([0.9805, 0.4931, 0.0315,  ..., 0.9071, 0.5605, 0.7269]),
+       size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.0123, 0.2996, 0.0215,  ..., 0.5909, 0.6219, 0.0073])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([10000, 10000])
+Rows: 10000
+Size: 100000000
+NNZ: 10000000
+Density: 0.1
+Time: 10.42000699043274 seconds
+
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
+  matrix = matrix.to_sparse_csr().type(torch.float32)
+tensor(crow_indices=tensor([       0,     1029,     2018,  ..., 9998096,
+                             9999045, 10000000]),
+       col_indices=tensor([   7,   14,   18,  ..., 9941, 9949, 9980]),
+       values=tensor([0.9805, 0.4931, 0.0315,  ..., 0.9071, 0.5605, 0.7269]),
+       size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr)
+tensor([0.0123, 0.2996, 0.0215,  ..., 0.5909, 0.6219, 0.0073])
+Matrix Type: synthetic
+Matrix Format: csr
+Shape: torch.Size([10000, 10000])
+Rows: 10000
+Size: 100000000
+NNZ: 10000000
+Density: 0.1
+Time: 10.42000699043274 seconds
+
+[21.55, 18.44, 17.92, 18.49, 18.01, 17.89, 18.03, 17.94, 18.06, 18.03]
+[79.43]
+17.597684144973755
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2918, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.42000699043274, 'TIME_S_1KI': 3.5709413949392523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1397.7840516352655, 'W': 79.43}
+[21.55, 18.44, 17.92, 18.49, 18.01, 17.89, 18.03, 17.94, 18.06, 18.03, 19.32, 18.84, 17.98, 18.09, 18.0, 17.91, 18.09, 18.07, 18.13, 17.93]
+328.30499999999995
+16.415249999999997
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2918, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.42000699043274, 'TIME_S_1KI': 3.5709413949392523, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1397.7840516352655, 'W': 79.43, 'J_1KI': 479.0212651251767, 'W_1KI': 27.220699108978753, 'W_D': 63.01475000000001, 'J_D': 1108.913666974485, 'W_D_1KI': 21.595185058259084, 'J_D_1KI': 7.400680280417781}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json
index e2fe041..114ef88 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json
@@ -1 +1 @@
-{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 282031, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.372447967529297, "TIME_S_1KI": 0.036777687444037345, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1003.1455927085877, "W": 73.48, "J_1KI": 3.5568628721969846, "W_1KI": 0.26053873510358794, "W_D": 57.203, "J_D": 780.9327346177101, "W_D_1KI": 0.20282522134091643, "J_D_1KI": 0.0007191593170286829}
+{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 286411, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.634347915649414, "TIME_S_1KI": 0.03712967698743908, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1001.8400530457496, "W": 73.11, "J_1KI": 3.4979105308306933, "W_1KI": 0.25526254229062434, "W_D": 56.78875, "J_D": 778.186900730431, "W_D_1KI": 0.19827712622769378, "J_D_1KI": 0.0006922818125969107}
diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output
index e246539..b127831 100644
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output
+++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output
@@ -1,373 +1,373 @@
 ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '10000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05941200256347656}
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05389976501464844}
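TIME_S on both sides of this hunk is the duration of the timed SpMV loop itself; at density 1e-05 the 1000-iteration calibration finishes in ~0.05 s, which is why the full runs need close to 200,000 iterations. A sketch of what the timed region presumably looks like (assumed; spmv.py is not included in the diff, and the measure name is illustrative):

```python
import time
import torch

def measure(matrix: torch.Tensor, iterations: int) -> float:
    """Time `iterations` sparse CSR matrix-vector products."""
    vector = torch.rand(matrix.shape[1])
    start = time.time()
    for _ in range(iterations):
        result = matrix @ vector  # SpMV on the CSR tensor
    return time.time() - start    # reported as TIME_S
```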
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
-tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([9369, 4292, 7681,  ..., 9023, 2082, 3833]),
-       values=tensor([5.3494e-01, 7.0226e-01, 2.1671e-01,  ..., 7.8318e-01,
-                      6.1479e-01, 6.7696e-01]),
+tensor(crow_indices=tensor([   0,    0,    0,  ...,  999,  999, 1000]),
+       col_indices=tensor([5877, 4250, 4686,  ..., 4042,  704, 9283]),
+       values=tensor([6.7911e-01, 3.6274e-01, 2.0782e-01,  ..., 9.8979e-01,
+                      3.2336e-01, 9.6873e-01]),
        size=(10000, 10000), nnz=1000, layout=torch.sparse_csr)
-tensor([0.9856, 0.2722, 0.4021,  ..., 0.8237, 0.4550, 0.5626])
+tensor([0.7118, 0.5445, 0.7766,  ..., 0.3375, 0.1794, 0.0442])
 Matrix Type: synthetic
 Matrix Format: csr
 Shape: torch.Size([10000, 10000])
@@ -375,271 +375,378 @@
 Rows: 10000
 Size: 100000000
 NNZ: 1000
 Density: 1e-05
-Time: 0.05941200256347656 seconds
+Time: 0.05389976501464844 seconds
 
-['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '176731', '-ss', '10000', '-sd', '1e-05']
-{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.579680919647217}
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '194806', '-ss', '10000', '-sd', '1e-05']
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.141697645187378}
 
 /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.)
  matrix = matrix.to_sparse_csr().type(torch.float32)
 tensor(crow_indices=tensor([   0,    0,    0,  ..., 1000, 1000, 1000]),
-       col_indices=tensor([1504, 3099, 4004,  ..., 2230, 2611, 5216,
-                           5819]),
-       values=tensor([0.7467, 0.8354, 0.8789,  ..., 0.5579, 0.0049, 0.2422,
-                      0.9021, 0.7969, 0.6116, 0.7882,
- 0.0064, 0.7028, 0.1202, 0.0407, 0.8816, 0.5750, 0.7270, - 0.7359, 0.9426, 0.4471, 0.6645, 0.7391, 0.4953, 0.1855, - 0.9757, 0.8337, 0.4173, 0.8819, 0.7926, 0.9439, 0.6488, - 0.5255, 0.2177, 0.8967, 0.4852, 0.8970, 0.3385, 0.1072, - 0.7487, 0.0835, 0.3044, 0.9792, 0.4704, 0.2580, 0.5151, - 0.8842, 0.7470, 0.1408, 0.9051, 0.3976, 0.8983]), + col_indices=tensor([9699, 5692, 1856, 5883, 6609, 910, 190, 8818, 6729, + 6588, 9895, 4579, 6475, 3640, 7256, 5776, 2107, 112, + 4061, 7172, 5250, 3509, 2541, 6334, 7236, 9125, 3397, + 1986, 8020, 7426, 640, 4809, 3135, 3852, 5226, 6972, + 7839, 6382, 8902, 5331, 2656, 2128, 1074, 1853, 2415, + 6472, 2510, 5655, 1427, 2596, 3458, 3907, 4524, 7308, + 7182, 5604, 363, 3020, 382, 2413, 6757, 1843, 5926, + 9800, 9243, 1216, 726, 2755, 3879, 2089, 6276, 1446, + 5747, 3255, 7160, 527, 7938, 2938, 6480, 2054, 3947, + 5160, 1424, 3755, 6322, 4755, 7220, 3748, 8641, 8485, + 4072, 5143, 4083, 6468, 5181, 8054, 5262, 8901, 1842, + 5556, 9197, 2422, 9598, 8776, 1431, 4844, 2968, 4592, + 1117, 3790, 2119, 9402, 1591, 3654, 5945, 8184, 2423, + 4084, 8724, 1704, 4602, 6181, 1446, 6069, 9025, 6809, + 4068, 6820, 4308, 8837, 7638, 9939, 7584, 715, 3080, + 5110, 1646, 270, 4750, 1889, 7344, 9493, 1362, 2806, + 2042, 9330, 8128, 1139, 8055, 3285, 5025, 3427, 4630, + 3457, 487, 5769, 1461, 1223, 6014, 676, 4177, 4788, + 808, 6218, 3273, 1857, 7645, 7883, 45, 3023, 2949, + 6931, 7238, 3245, 2607, 1940, 9243, 7118, 1761, 8715, + 6342, 6537, 2112, 8823, 435, 8620, 6948, 5293, 2230, + 860, 4367, 88, 989, 2922, 3107, 2766, 5993, 5889, + 9018, 6344, 7354, 9373, 2993, 5145, 4372, 1500, 3401, + 7249, 7339, 2560, 4049, 4794, 1097, 4845, 8722, 1758, + 8888, 497, 786, 8206, 3869, 9310, 8168, 6537, 4297, + 4957, 888, 1560, 5004, 6079, 1472, 9820, 7535, 5, + 6957, 2891, 4973, 5114, 8846, 3802, 6063, 5430, 3488, + 4815, 6740, 5508, 7371, 7426, 341, 8923, 8690, 171, + 6415, 5450, 2787, 9982, 3748, 3838, 4615, 5263, 102, + 6690, 3873, 9381, 4288, 2954, 8360, 3646, 2615, 4141, + 1050, 1125, 185, 7696, 5529, 6425, 4054, 2857, 7604, + 5735, 6919, 671, 7369, 4003, 5598, 8207, 1635, 5650, + 901, 7034, 7990, 5233, 8098, 195, 5419, 2566, 7620, + 9130, 5961, 1895, 6390, 2059, 5841, 9008, 9683, 9749, + 6124, 7032, 9157, 6822, 4774, 3289, 1834, 7396, 7421, + 7499, 2291, 6909, 7905, 4469, 6511, 7350, 2615, 4732, + 8418, 5371, 4919, 2663, 2137, 3983, 7792, 7288, 1612, + 4647, 3391, 7921, 4140, 3599, 635, 5337, 1503, 2825, + 1470, 9664, 4879, 5908, 6754, 5907, 8818, 5502, 2991, + 75, 371, 7734, 1045, 9930, 8101, 7139, 5503, 5475, + 2679, 461, 8155, 5016, 8667, 5905, 7548, 4722, 3455, + 1638, 1102, 3780, 3499, 2110, 514, 4587, 204, 9098, + 6201, 2603, 7037, 2599, 671, 2366, 9398, 7381, 7138, + 1169, 4549, 4654, 1754, 1153, 4615, 7103, 4051, 2487, + 3616, 8644, 5118, 3483, 5335, 4026, 1783, 2758, 2984, + 638, 9209, 7083, 3264, 265, 9549, 7169, 7144, 6068, + 6545, 1580, 8287, 7166, 1700, 633, 6101, 210, 3447, + 405, 8401, 2116, 3061, 120, 3758, 3224, 2049, 8242, + 4839, 9464, 693, 5141, 764, 6513, 8446, 8212, 3275, + 9682, 1420, 3408, 4145, 9904, 1057, 5587, 901, 5152, + 2841, 3558, 7842, 4197, 3309, 2335, 1192, 5405, 9916, + 2481, 1125, 8151, 7638, 6922, 2430, 912, 4770, 4712, + 6647, 5230, 6086, 4985, 7308, 9288, 1384, 7330, 8063, + 5564, 9472, 1976, 40, 4812, 6504, 4391, 3464, 2689, + 1114, 2263, 9898, 1567, 9540, 6822, 821, 1998, 7878, + 9181, 1818, 1209, 2539, 1160, 9808, 463, 263, 8208, + 8171, 129, 8433, 4617, 8359, 312, 1640, 1522, 4048, + 5980, 3300, 1945, 1110, 4379, 3980, 2115, 283, 7658, + 
7769, 8159, 4283, 7740, 5868, 9747, 2476, 5456, 5897, + 9500, 9092, 4934, 2014, 2634, 9897, 912, 3436, 4361, + 2970, 722, 6724, 1342, 9452, 1039, 4991, 8561, 2030, + 3999, 5414, 5316, 712, 5681, 2518, 6790, 9860, 3506, + 5093, 7295, 6572, 6868, 2355, 5778, 9203, 4880, 2791, + 9320, 3014, 9918, 2921, 8988, 141, 1475, 9846, 4713, + 9024, 692, 5076, 7563, 6498, 8907, 1295, 1351, 4964, + 5270, 1482, 1910, 1143, 9509, 3679, 6920, 2839, 5271, + 5790, 7550, 7877, 7196, 8110, 3458, 8863, 6442, 4179, + 5065, 9844, 2060, 799, 7085, 603, 6004, 1749, 2077, + 2038, 4582, 1298, 8893, 181, 4569, 3070, 7157, 8790, + 2391, 724, 2063, 3622, 5206, 7595, 2202, 5552, 9002, + 347, 505, 5459, 2834, 7758, 2282, 1970, 9079, 6558, + 1962, 4357, 2522, 992, 5448, 7106, 6521, 6066, 8664, + 1489, 8809, 8981, 3254, 631, 6925, 9786, 4711, 2080, + 9941, 4418, 9086, 462, 1764, 3142, 8362, 7446, 6622, + 3369, 4863, 5373, 9178, 1481, 8646, 5900, 9548, 4408, + 7750, 7399, 1581, 2547, 9301, 5969, 179, 2851, 1207, + 428, 6125, 8190, 5103, 7928, 6601, 7331, 9667, 2432, + 6776, 2279, 2842, 2285, 9680, 8221, 1724, 9070, 1222, + 3603, 9120, 3653, 5672, 3399, 4625, 2429, 8889, 5981, + 1000, 5470, 2261, 5871, 8703, 4691, 1671, 5061, 2598, + 6933, 2768, 853, 7015, 5601, 5218, 381, 537, 8936, + 4791, 6282, 4998, 3247, 4654, 6455, 8944, 8290, 8745, + 2692, 6962, 2252, 2867, 5432, 4051, 4941, 5684, 7006, + 3108, 2162, 9080, 649, 4797, 4290, 9784, 6945, 4661, + 1160, 570, 6461, 4929, 266, 64, 9848, 9068, 6428, + 2769, 84, 8378, 7068, 3001, 2149, 8583, 5287, 436, + 9111, 5124, 9797, 2012, 2411, 4043, 4222, 1696, 546, + 4712, 4905, 8837, 5463, 8521, 9390, 2336, 2226, 1407, + 4822, 7155, 316, 6391, 9589, 6434, 8313, 6737, 696, + 6410, 972, 3354, 3601, 6436, 9895, 5209, 2618, 8553, + 7811, 2958, 4986, 6996, 6746, 7395, 92, 2747, 39, + 3164, 1246, 6647, 6895, 5256, 8149, 9776, 8926, 7338, + 1299, 6106, 854, 9373, 1443, 3055, 677, 459, 1881, + 3164, 1865, 8156, 3059, 1315, 8469, 7724, 3143, 7132, + 3791, 4564, 3865, 5879, 7306, 568, 9812, 1785, 5323, + 5765, 3615, 4800, 7634, 8579, 1808, 3502, 7965, 2087, + 2874, 1541, 2579, 3128, 3863, 562, 8156, 7588, 2199, + 3579, 2253, 4682, 7858, 2016, 6187, 2976, 6788, 2542, + 3791, 5389, 1314, 9919, 5823, 7598, 7913, 4913, 6239, + 5585, 4565, 8135, 4632, 5898, 7355, 196, 8411, 9673, + 9719, 3316, 7270, 9467, 5676, 72, 6041, 6756, 7259, + 7416, 3170, 8693, 2493, 2027, 6871, 1234, 5081, 4177, + 2314, 1689, 4125, 6054, 3950, 1579, 9517, 5957, 3594, + 6383, 7678, 9558, 5291, 268, 5430, 2249, 368, 2109, + 683, 1529, 3492, 84, 4074, 5390, 403, 9378, 7210, + 3148, 1655, 9598, 6011, 2729, 4928, 4344, 2719, 1872, + 6784, 2105, 8880, 6712, 1644, 4470, 800, 8040, 3214, + 8452, 1233, 5357, 3667, 4151, 6027, 6841, 7686, 1673, + 5009, 4659, 1398, 3855, 9108, 8464, 7121, 6004, 5958, + 4060, 4437, 9638, 5697, 6382, 800, 2529, 9907, 3927, + 3494, 5242, 9767, 6705, 7803, 4937, 5829, 6107, 4596, + 3160]), + values=tensor([3.4107e-01, 5.8259e-01, 9.0206e-01, 1.5231e-01, + 1.5774e-01, 5.3916e-01, 6.1194e-01, 9.5075e-01, + 4.0186e-02, 9.8706e-01, 9.4080e-01, 5.8716e-01, + 6.7869e-02, 3.7279e-01, 5.8225e-01, 5.0243e-01, + 5.6104e-01, 8.1432e-01, 5.5779e-01, 3.1556e-01, + 2.4611e-01, 5.6301e-02, 4.5138e-01, 1.3073e-01, + 2.2739e-01, 1.4106e-01, 5.6086e-01, 3.1390e-01, + 9.4666e-01, 5.6872e-01, 7.4045e-01, 4.4636e-01, + 9.3540e-01, 6.8254e-01, 8.7194e-02, 4.9288e-01, + 2.8910e-01, 1.5515e-01, 6.9777e-01, 8.1779e-01, + 3.7513e-01, 2.9492e-01, 7.3948e-01, 2.5269e-01, + 1.5901e-01, 3.4237e-01, 1.4025e-01, 2.9783e-01, + 4.2632e-01, 
8.3642e-01, 7.5962e-01, 7.7581e-01, + 9.4394e-01, 4.6305e-01, 8.1264e-01, 6.5263e-01, + 6.3669e-01, 5.5673e-01, 4.0066e-01, 6.0592e-01, + 5.1269e-01, 2.7275e-01, 6.9278e-01, 3.2280e-01, + 2.2773e-01, 4.8282e-01, 9.4239e-01, 9.4261e-01, + 5.6880e-01, 1.9748e-01, 9.6239e-01, 6.2399e-01, + 1.0298e-01, 3.2784e-01, 5.0119e-01, 6.7033e-01, + 9.5959e-01, 3.2246e-01, 1.1867e-01, 1.1081e-01, + 9.7342e-01, 6.9772e-01, 8.4391e-01, 2.0712e-01, + 4.1353e-01, 6.2809e-01, 8.2107e-01, 7.0050e-02, + 2.4630e-01, 8.1426e-01, 6.4572e-01, 2.3166e-01, + 1.1184e-01, 3.9971e-02, 4.0002e-01, 9.7835e-01, + 2.0212e-01, 1.8854e-01, 1.5726e-01, 8.3404e-01, + 4.2840e-01, 9.5166e-01, 3.2665e-01, 3.2984e-01, + 4.3002e-01, 3.7293e-01, 8.9415e-01, 7.2970e-01, + 8.8637e-01, 6.2604e-01, 1.9380e-02, 2.6423e-01, + 4.4435e-01, 1.5585e-01, 7.1475e-01, 3.1465e-01, + 3.5278e-01, 3.9353e-01, 2.4005e-01, 2.3418e-01, + 1.9743e-01, 2.4430e-01, 1.6408e-01, 1.1091e-01, + 3.2187e-01, 8.6647e-01, 3.3577e-01, 7.5728e-01, + 9.5105e-02, 9.1206e-01, 6.8281e-01, 1.3476e-01, + 1.6985e-01, 3.5679e-01, 5.7880e-01, 2.6246e-01, + 8.6490e-01, 6.1564e-02, 1.3512e-03, 5.8975e-01, + 6.3041e-01, 2.8240e-01, 7.9244e-01, 9.7654e-01, + 2.4998e-03, 1.4618e-01, 9.6273e-01, 8.5326e-01, + 6.9665e-01, 4.2947e-02, 2.9434e-01, 6.4109e-01, + 3.5892e-01, 4.9565e-01, 3.7949e-01, 7.4967e-01, + 8.7995e-01, 6.6710e-01, 1.8686e-01, 3.9784e-01, + 3.8422e-01, 7.0529e-01, 9.2915e-02, 7.3076e-01, + 1.9805e-01, 6.8473e-01, 5.3952e-01, 2.9416e-01, + 4.5626e-01, 9.6675e-01, 5.9033e-01, 9.0386e-01, + 3.7952e-01, 9.9075e-01, 6.9146e-01, 5.8950e-01, + 2.2022e-01, 3.9380e-01, 7.0756e-01, 5.0251e-01, + 5.7638e-01, 7.2018e-01, 2.6283e-01, 7.7764e-03, + 4.5319e-02, 5.3242e-01, 5.7104e-01, 8.9803e-01, + 1.7680e-01, 3.6901e-01, 2.9846e-01, 3.2596e-01, + 6.1306e-01, 6.6736e-01, 4.1856e-01, 3.3300e-01, + 7.0737e-01, 9.7738e-01, 9.7458e-01, 1.1018e-01, + 1.5941e-01, 1.4525e-01, 5.1916e-01, 9.2485e-01, + 1.2389e-01, 9.6093e-01, 4.5693e-01, 3.6103e-01, + 7.2257e-01, 8.6213e-01, 7.5457e-01, 6.8732e-01, + 6.5566e-01, 3.3795e-01, 2.0535e-01, 1.4076e-01, + 3.5574e-01, 2.3672e-02, 8.9044e-01, 6.4557e-01, + 7.6495e-01, 4.9628e-01, 2.4630e-01, 1.6034e-01, + 9.5737e-01, 8.8421e-01, 2.4213e-01, 7.7080e-01, + 1.2698e-01, 9.7317e-01, 4.5912e-01, 1.8065e-01, + 6.0195e-01, 1.8788e-01, 5.6326e-01, 9.7076e-01, + 4.6756e-01, 4.5233e-01, 2.0422e-01, 6.3361e-01, + 8.5462e-02, 3.5504e-02, 4.7209e-01, 2.7244e-01, + 4.7504e-02, 4.3276e-01, 8.5968e-01, 8.8808e-01, + 6.8370e-01, 9.7504e-01, 7.9917e-01, 7.7973e-01, + 4.8502e-01, 2.2438e-01, 1.5410e-01, 4.3549e-01, + 6.2630e-01, 2.0717e-01, 7.3108e-01, 8.7995e-01, + 5.8807e-01, 1.8904e-01, 9.7540e-01, 3.5022e-01, + 6.1668e-01, 4.2630e-01, 1.6345e-01, 6.3050e-01, + 3.3571e-01, 4.6183e-01, 3.2077e-01, 5.6221e-01, + 5.6951e-01, 8.9562e-01, 7.7176e-01, 2.8270e-01, + 2.2731e-01, 8.5000e-01, 7.9221e-01, 3.0058e-01, + 2.5522e-01, 1.9619e-01, 8.9598e-01, 5.5217e-01, + 1.2127e-01, 1.1458e-02, 1.8720e-01, 2.6580e-01, + 3.6930e-01, 3.3263e-01, 2.3041e-01, 3.8251e-01, + 1.7020e-01, 5.6143e-01, 8.5724e-01, 2.4612e-01, + 4.1008e-01, 6.7912e-01, 9.0655e-01, 9.8953e-01, + 5.1157e-01, 4.0535e-01, 1.2339e-01, 4.2239e-04, + 6.3665e-01, 2.5100e-01, 4.6716e-01, 9.3143e-01, + 3.1984e-01, 2.3807e-02, 1.6142e-01, 1.1064e-01, + 3.4538e-01, 1.1648e-01, 7.6242e-01, 9.1220e-01, + 1.0508e-01, 2.2946e-01, 2.0923e-02, 2.8600e-04, + 5.7946e-01, 6.4038e-02, 6.1635e-01, 7.6588e-01, + 2.1260e-01, 3.9026e-01, 5.8087e-01, 6.1305e-01, + 7.5460e-01, 2.3937e-01, 9.1516e-01, 1.8689e-02, + 5.9670e-01, 
4.7403e-01, 7.5566e-01, 2.4853e-01, + 8.7701e-02, 5.3822e-01, 6.4989e-01, 5.3743e-01, + 6.4136e-01, 3.3117e-01, 9.5819e-01, 3.9978e-01, + 7.8790e-01, 9.7921e-01, 1.4496e-01, 2.8499e-01, + 7.3365e-01, 8.2835e-01, 5.9327e-01, 7.6840e-01, + 1.7718e-01, 2.2172e-01, 5.5148e-01, 9.9897e-01, + 8.0818e-01, 6.3864e-01, 8.1272e-01, 1.6892e-01, + 4.9513e-01, 7.3640e-01, 7.4037e-01, 7.2909e-01, + 2.0953e-01, 1.2960e-01, 4.9217e-01, 4.6350e-02, + 9.1230e-01, 1.6167e-01, 7.9224e-01, 3.4126e-01, + 5.7486e-01, 6.7833e-01, 5.6509e-01, 2.4295e-01, + 9.4750e-01, 1.8697e-01, 6.7314e-01, 3.5818e-01, + 3.5532e-01, 6.9046e-01, 5.1394e-01, 2.6353e-01, + 5.5840e-03, 5.9048e-01, 1.0845e-01, 8.7156e-02, + 1.4373e-01, 2.7625e-01, 5.6213e-01, 7.3289e-01, + 8.6444e-01, 3.8270e-01, 7.8160e-01, 2.8543e-01, + 4.9744e-03, 7.0182e-01, 4.3329e-01, 3.0222e-01, + 6.5215e-03, 1.5251e-01, 2.8373e-02, 5.8213e-01, + 5.8849e-01, 8.6320e-01, 3.7353e-01, 2.8151e-01, + 8.8041e-01, 6.6590e-01, 3.8896e-01, 2.0248e-01, + 6.0602e-02, 8.3962e-01, 9.3127e-01, 9.5709e-01, + 2.7723e-01, 7.5722e-01, 5.1121e-01, 6.3922e-01, + 5.0456e-01, 3.7729e-01, 7.3885e-01, 6.6998e-01, + 5.9712e-01, 7.2155e-01, 5.2050e-01, 1.8090e-01, + 7.6263e-01, 2.8091e-01, 6.5032e-02, 9.5283e-01, + 4.9306e-01, 5.2967e-01, 5.4182e-01, 9.8136e-01, + 1.5577e-01, 6.6767e-01, 6.9720e-01, 7.8091e-01, + 6.4307e-01, 4.3554e-01, 2.8592e-01, 4.6926e-01, + 6.5065e-01, 9.1978e-01, 1.0899e-01, 7.3189e-01, + 5.0993e-01, 5.5218e-01, 7.7634e-01, 3.3534e-01, + 3.5754e-02, 9.2200e-01, 2.1710e-01, 4.5560e-01, + 1.0296e-01, 4.1832e-03, 5.3775e-01, 7.2447e-01, + 8.5718e-01, 1.1389e-01, 1.1775e-01, 2.5285e-01, + 1.0865e-01, 3.5541e-01, 6.2477e-01, 1.8628e-01, + 5.0797e-01, 5.5687e-01, 7.7752e-01, 5.0363e-01, + 4.2753e-01, 5.4263e-01, 9.3575e-01, 3.3169e-01, + 8.2404e-02, 3.4090e-01, 3.3525e-01, 7.3596e-01, + 6.0975e-02, 1.7112e-01, 4.8399e-01, 6.3775e-01, + 1.4649e-01, 2.3445e-01, 8.3492e-01, 5.2535e-01, + 7.9017e-01, 9.0307e-01, 4.8658e-01, 7.5883e-01, + 3.5785e-01, 6.0096e-01, 5.7555e-01, 1.7943e-01, + 7.3885e-02, 1.2125e-01, 3.2272e-01, 2.2891e-01, + 8.4517e-01, 6.2769e-02, 6.0906e-01, 4.9417e-01, + 3.8138e-01, 8.5100e-01, 6.5139e-02, 3.6631e-01, + 9.9685e-01, 4.3060e-01, 1.6032e-01, 4.7700e-01, + 4.0976e-01, 1.4277e-01, 1.2966e-01, 5.1290e-01, + 4.5698e-01, 9.2478e-01, 7.7864e-01, 4.4411e-01, + 7.3661e-01, 7.8892e-01, 1.5212e-01, 2.8232e-01, + 7.1727e-01, 7.6833e-01, 3.2281e-01, 7.9870e-02, + 3.6650e-01, 2.3854e-01, 8.8911e-01, 5.5056e-01, + 7.6939e-01, 2.5148e-01, 4.7798e-01, 7.6194e-01, + 2.2033e-01, 1.3396e-01, 3.3324e-01, 5.7380e-01, + 3.7708e-02, 8.4699e-01, 6.8381e-01, 6.1197e-01, + 4.9954e-01, 2.0119e-02, 4.9933e-01, 4.5701e-01, + 4.7771e-01, 2.2332e-01, 7.1689e-01, 8.5814e-01, + 4.7145e-01, 5.9539e-01, 1.2098e-01, 3.3254e-01, + 2.4795e-01, 2.5808e-01, 9.2196e-01, 1.1027e-01, + 6.9737e-01, 9.9262e-03, 3.9118e-01, 3.0348e-01, + 8.9520e-01, 5.6578e-01, 5.0959e-02, 7.6907e-01, + 4.8625e-01, 9.7317e-01, 8.3900e-01, 4.0362e-01, + 7.6045e-04, 4.9537e-01, 1.0689e-01, 5.7224e-01, + 8.2067e-01, 1.7758e-01, 1.7972e-01, 9.8133e-01, + 4.6896e-01, 2.0499e-01, 6.5187e-01, 8.8343e-01, + 5.0622e-01, 2.6635e-01, 9.3242e-01, 3.8417e-01, + 2.6588e-01, 4.5938e-02, 4.7982e-02, 8.9591e-01, + 2.3498e-01, 2.4143e-01, 1.7232e-01, 7.1027e-01, + 5.6187e-01, 6.2697e-01, 6.7454e-02, 1.5053e-01, + 9.3465e-01, 3.4974e-01, 5.3987e-01, 9.7711e-01, + 9.4404e-01, 6.2483e-02, 8.1453e-01, 5.0008e-01, + 3.2546e-01, 1.5956e-01, 7.5035e-01, 7.1251e-01, + 9.6113e-01, 7.9846e-01, 7.5855e-01, 6.3405e-01, + 6.5622e-01, 
8.0636e-01, 6.1280e-01, 1.0840e-01, + 5.9194e-01, 8.3410e-02, 5.7581e-01, 3.0383e-01, + 2.8427e-01, 6.7511e-01, 3.3074e-01, 9.1898e-01, + 7.9597e-01, 9.5382e-01, 8.0795e-01, 3.8605e-01, + 8.7692e-01, 5.9298e-01, 7.0179e-01, 8.1178e-01, + 9.1999e-01, 9.7833e-02, 9.7685e-01, 1.2559e-01, + 3.7595e-01, 1.0619e-01, 4.6649e-01, 9.5235e-02, + 6.1260e-01, 1.3643e-01, 5.0897e-01, 8.7723e-01, + 5.5802e-01, 9.6592e-01, 8.1266e-01, 8.2225e-01, + 2.2504e-01, 3.6534e-02, 2.9586e-01, 5.3084e-01, + 1.9037e-01, 5.4451e-01, 5.8438e-01, 8.9833e-01, + 3.7645e-01, 3.3522e-01, 8.7779e-01, 3.3495e-01, + 2.6928e-01, 9.6575e-01, 8.1937e-02, 2.6664e-01, + 9.2253e-01, 6.7376e-01, 7.4185e-01, 8.8061e-01, + 7.9892e-01, 6.8636e-01, 8.7397e-01, 7.2702e-01, + 6.0810e-01, 1.7136e-01, 7.2996e-01, 1.8868e-01, + 3.6151e-01, 4.2774e-01, 7.8979e-01, 1.7459e-01, + 7.0642e-01, 3.1498e-01, 7.1813e-02, 6.6140e-01, + 2.5986e-01, 2.7422e-02, 4.5608e-01, 9.1117e-01, + 4.2735e-01, 9.4874e-01, 7.4648e-01, 8.4866e-01, + 6.2441e-01, 2.4072e-01, 4.4676e-01, 5.1553e-01, + 8.6749e-01, 5.8036e-01, 7.1183e-01, 6.5350e-01, + 5.5260e-01, 5.5351e-01, 6.2691e-01, 7.4629e-01, + 8.2506e-01, 4.4927e-01, 4.2554e-01, 3.1876e-01, + 1.4230e-02, 5.6366e-01, 6.2014e-01, 6.7544e-01, + 6.6287e-01, 1.5509e-01, 1.8691e-01, 8.1803e-01, + 1.8324e-01, 3.7563e-01, 8.2116e-01, 4.7677e-01, + 8.3668e-01, 3.7147e-01, 2.1279e-01, 6.7702e-01, + 5.2652e-01, 6.5494e-01, 2.3574e-01, 3.1116e-02, + 8.1466e-01, 2.1925e-01, 6.3967e-01, 9.1210e-01, + 3.5590e-01, 3.2824e-02, 7.8165e-02, 6.7910e-01, + 7.7991e-02, 6.3361e-01, 8.9714e-01, 8.1623e-02, + 4.8409e-01, 6.2221e-01, 6.6241e-01, 5.7971e-01, + 6.0738e-01, 2.5849e-02, 7.0566e-01, 1.7704e-01, + 3.6729e-01, 1.3839e-01, 9.9366e-01, 4.4142e-01, + 1.9823e-01, 8.1609e-01, 5.8385e-01, 4.8917e-01, + 2.2523e-01, 8.3428e-01, 5.9977e-01, 6.3523e-01, + 1.6607e-01, 9.4440e-01, 1.9587e-01, 6.1683e-01, + 3.5810e-01, 4.7400e-01, 4.3512e-01, 6.0426e-01, + 2.8234e-01, 2.5576e-01, 7.5082e-01, 8.4535e-01, + 1.3393e-01, 1.8660e-01, 7.0341e-01, 1.1964e-02, + 4.0743e-01, 6.8186e-01, 5.1220e-01, 6.6603e-01, + 1.6310e-01, 6.8026e-01, 8.4955e-01, 3.6216e-01, + 4.1255e-01, 1.3285e-01, 8.7437e-01, 5.8389e-01, + 9.5977e-01, 6.3271e-01, 6.2448e-01, 7.7622e-01, + 4.0343e-01, 1.3948e-01, 1.5996e-01, 4.1658e-01, + 1.5073e-02, 9.1613e-01, 6.4840e-01, 4.0343e-01, + 8.8647e-01, 1.1074e-01, 7.7289e-01, 5.6628e-01, + 8.4421e-02, 1.1948e-01, 3.1581e-01, 1.9177e-01, + 4.8860e-01, 1.5470e-01, 6.2486e-01, 4.4421e-01, + 3.9513e-01, 1.4266e-01, 2.1619e-01, 2.0583e-01, + 7.4146e-01, 1.8079e-01, 3.9100e-01, 4.3376e-01, + 4.5332e-01, 4.6722e-01, 4.8936e-01, 7.7059e-01, + 3.2068e-01, 6.0436e-01, 7.5036e-01, 4.4729e-01, + 5.1029e-01, 2.4280e-01, 8.3554e-01, 4.1030e-01, + 6.4449e-01, 2.1449e-01, 8.9230e-01, 7.7362e-01, + 9.0345e-01, 2.9250e-01, 7.2883e-01, 2.6007e-01, + 4.3249e-02, 3.2905e-01, 7.8983e-01, 3.3206e-01, + 6.1799e-01, 5.1913e-01, 2.4351e-01, 9.6034e-01, + 4.3264e-01, 5.1049e-01, 1.7936e-01, 6.6325e-01, + 9.0829e-01, 5.2970e-01, 8.9989e-01, 1.3213e-01, + 4.3889e-01, 8.5246e-01, 6.0845e-01, 5.0519e-01, + 9.1563e-01, 5.5071e-01, 4.8853e-01, 6.4062e-01, + 7.4041e-02, 2.8417e-01, 4.8817e-01, 7.7442e-01, + 8.6382e-01, 2.5864e-01, 4.9162e-01, 9.7010e-01, + 7.8372e-02, 5.3483e-01, 4.5161e-01, 8.0667e-01, + 9.2389e-01, 6.6574e-01, 3.0939e-01, 2.5476e-01, + 6.3040e-01, 2.6954e-01, 5.2755e-01, 4.1507e-01, + 6.6436e-01, 4.2837e-01, 6.9834e-01, 4.2882e-01, + 6.5943e-01, 2.0974e-01, 1.9505e-01, 5.3006e-01, + 8.1315e-01, 7.9665e-01, 3.0262e-01, 6.5602e-01, + 8.3583e-01, 
9.6204e-01, 3.0115e-01, 3.5509e-02, + 5.9246e-01, 4.1207e-01, 5.4602e-02, 2.0125e-01, + 8.2332e-01, 5.7405e-02, 8.1456e-01, 8.0780e-01, + 5.7221e-01, 5.6654e-01, 7.9385e-01, 2.5343e-01, + 8.6088e-01, 4.4022e-01, 7.5807e-01, 7.0350e-01, + 6.7430e-01, 2.6578e-01, 4.6147e-01, 4.5759e-01, + 9.2754e-01, 1.4889e-01, 7.6663e-01, 2.1741e-01, + 2.2244e-01, 8.5749e-01, 1.3077e-02, 7.9269e-01, + 1.6559e-01, 2.2460e-01, 7.5782e-01, 4.7896e-01, + 5.1457e-01, 6.9584e-01, 1.8496e-01, 9.1267e-01, + 1.1589e-01, 5.4519e-01, 7.0833e-01, 9.4859e-01, + 8.1018e-01, 7.1465e-01, 8.8549e-01, 3.7149e-01, + 5.1221e-01, 7.0856e-01, 3.6562e-01, 6.5691e-01, + 9.8664e-01, 5.0664e-01, 8.1957e-01, 2.6047e-01, + 2.4718e-01, 1.8765e-01, 2.7938e-01, 4.4195e-01, + 8.2697e-01, 2.6656e-01, 6.3635e-01, 3.3041e-01, + 8.6448e-01, 2.9462e-01, 3.1636e-01, 2.0534e-01, + 9.6163e-01, 8.7485e-01, 2.0748e-01, 4.9018e-01, + 9.6966e-01, 5.8261e-01, 3.1234e-01, 7.5691e-02, + 9.3811e-01, 2.6135e-01, 9.6636e-01, 3.1124e-01, + 3.7538e-01, 5.2802e-01, 9.7340e-01, 5.0175e-01, + 9.0082e-02, 2.2941e-01, 6.5343e-01, 2.4368e-01, + 8.2203e-01, 4.5287e-01, 5.9015e-01, 6.0898e-01, + 7.0289e-01, 6.1400e-02, 3.4722e-01, 1.2295e-01, + 6.7460e-01, 3.0153e-01, 5.3121e-01, 2.6773e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.6331, 0.4592, 0.8230, ..., 0.6920, 0.8755, 0.3375]) +tensor([0.1721, 0.8059, 0.1299, ..., 0.7732, 0.0077, 0.2449]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -647,378 +754,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 6.579680919647217 seconds +Time: 7.141697645187378 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '282031', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.372447967529297} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '286411', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.634347915649414} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
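The two commands above are the same synthetic-CSR SpMV benchmark run at two iteration counts. As an aid to reading these logs, here is a minimal sketch of what such a run plausibly does: build a random sparse matrix at the requested size and density, convert it to CSR (the source of the beta-state UserWarning), and time repeated multiplications against a random dense vector. This is an illustration, not the repository's spmv.py; the variable names and the COO-first construction are assumptions.

import time
import torch

size, density = 10000, 1e-05     # matches '-ss 10000 -sd 1e-05'
iterations = 286411              # iteration count from the new command line
nnz = int(size * size * density) # 1000 nonzeros, as in the logs

# Assumed construction: random COO entries, coalesced, then converted to
# CSR exactly as the logged warning line shows. Coalescing may merge
# duplicate coordinates, so the effective nnz can be slightly below nnz.
indices = torch.randint(0, size, (2, nnz))
values = torch.rand(nnz)
matrix = torch.sparse_coo_tensor(indices, values, (size, size)).coalesce()
matrix = matrix.to_sparse_csr().type(torch.float32)

# The dense tensor printed after the matrix in each log block.
vector = torch.rand(size, dtype=torch.float32)

start = time.perf_counter()
for _ in range(iterations):
    matrix @ vector              # sparse CSR x dense vector (SpMV)
elapsed = time.perf_counter() - start
print(f"Time: {elapsed} seconds")  # corresponds to TIME_S in the JSON record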
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 641, 1820, 5085, 7616, 6287, 2843, 3768, 139, 557, - 8943, 8505, 7281, 43, 6135, 7303, 4893, 489, 615, - 6714, 3680, 855, 4855, 479, 9230, 4436, 9603, 1635, - 9190, 9268, 3004, 1125, 8864, 107, 6184, 8970, 8700, - 7610, 2464, 2526, 7595, 3071, 5215, 1177, 6775, 4184, - 7851, 6577, 5571, 7909, 9344, 735, 6183, 9381, 8186, - 7299, 7523, 9047, 1302, 3301, 6829, 1465, 8532, 8991, - 1047, 5588, 9587, 3024, 6187, 7730, 4690, 6326, 2702, - 2537, 5158, 9461, 7448, 9578, 6012, 7028, 226, 6053, - 1967, 8146, 5831, 6774, 2244, 6191, 9992, 2390, 9133, - 8890, 766, 5014, 4790, 2155, 4691, 2161, 5599, 1756, - 7675, 496, 9605, 711, 5336, 9031, 2531, 2338, 9491, - 3768, 7092, 9040, 599, 4662, 8394, 522, 7316, 1506, - 525, 4754, 5479, 3359, 6765, 8131, 5941, 6009, 2305, - 1065, 3240, 5116, 987, 2923, 7533, 799, 2666, 5958, - 7677, 3749, 6265, 1775, 7785, 8588, 7337, 5345, 1189, - 7948, 6558, 2457, 299, 6806, 3954, 5519, 1039, 5338, - 7232, 812, 6127, 3761, 4004, 6956, 9330, 4265, 8611, - 6724, 8377, 8527, 2706, 1567, 2671, 2286, 3304, 1272, - 2070, 9256, 4736, 9758, 5406, 1452, 7915, 3101, 144, - 9979, 1960, 6381, 3432, 2638, 7424, 537, 2559, 9614, - 8179, 4691, 7438, 1120, 5872, 3070, 8489, 6931, 1985, - 4925, 4797, 5226, 7081, 8212, 445, 4109, 5640, 2257, - 7467, 2012, 6933, 9812, 6077, 3165, 2359, 17, 5933, - 7004, 1444, 9368, 5351, 6245, 3660, 7222, 2545, 8008, - 2336, 3100, 3043, 8828, 7123, 7845, 9969, 4531, 3791, - 9658, 582, 9738, 7316, 8440, 1704, 64, 9634, 7709, - 5983, 9441, 9604, 5121, 141, 2351, 3725, 6220, 7490, - 9494, 5209, 7692, 9939, 632, 7847, 6751, 9374, 2312, - 8674, 2385, 1543, 8905, 8015, 222, 786, 7500, 8256, - 2521, 6696, 1643, 8098, 4270, 7100, 3669, 2113, 1313, - 8306, 6672, 298, 1769, 5946, 9397, 7730, 9951, 7831, - 3644, 512, 9197, 3290, 2307, 6307, 5995, 9826, 8701, - 1474, 5284, 3261, 2218, 2945, 4813, 3037, 7819, 5822, - 7884, 4720, 2534, 1208, 3616, 922, 2226, 4382, 3494, - 6973, 6642, 6587, 3020, 4947, 3226, 2552, 101, 2200, - 8571, 1979, 8248, 2284, 9387, 3200, 9086, 615, 4324, - 7614, 1291, 9099, 2422, 9376, 7855, 2869, 4373, 9650, - 4367, 4622, 5124, 7211, 2179, 1614, 3245, 5389, 6821, - 6300, 8718, 1035, 3582, 8257, 7452, 2297, 3762, 3338, - 2343, 6557, 3623, 5128, 4244, 4231, 1091, 2759, 1073, - 1438, 9204, 3250, 1109, 5974, 5625, 9640, 5403, 9827, - 9860, 2538, 6365, 3807, 7739, 4746, 196, 7104, 1414, - 9645, 5312, 4324, 1738, 8925, 3231, 558, 7534, 4725, - 9093, 1277, 4867, 2640, 3909, 8870, 6450, 5182, 1941, - 5107, 8388, 8527, 7970, 1975, 7722, 4808, 8077, 5290, - 5743, 6355, 7161, 573, 7648, 7139, 100, 2042, 3159, - 1046, 6596, 5452, 1650, 9645, 6382, 6862, 555, 3741, - 118, 8221, 8479, 8346, 7654, 5102, 3554, 7238, 258, - 9058, 600, 7288, 3482, 1231, 52, 6020, 6829, 8334, - 4647, 3307, 2858, 528, 4606, 6172, 3855, 4925, 5620, - 132, 8810, 4311, 4446, 8520, 9436, 6123, 4339, 1754, - 5002, 7908, 529, 6941, 1458, 1377, 7366, 3326, 2975, - 4976, 2358, 9764, 3735, 4314, 9201, 3224, 3442, 4608, - 7255, 153, 1882, 8568, 6384, 2712, 5756, 5384, 9835, - 2913, 8596, 324, 1552, 8661, 2777, 8374, 5359, 1615, - 3969, 6396, 7689, 2470, 778, 7314, 3640, 427, 3557, - 3960, 5759, 9579, 3154, 4508, 9713, 2509, 4663, 4700, - 2397, 2730, 4887, 7158, 9627, 992, 5544, 8476, 9970, - 6284, 6686, 6616, 1433, 1753, 1639, 788, 8113, 3009, - 2084, 9407, 5338, 8482, 2017, 2846, 3658, 5916, 2942, - 3321, 8671, 2490, 5057, 4147, 6392, 6458, 674, 981, - 
3352, 6130, 6143, 264, 4531, 5618, 3537, 3145, 3094, - 4051, 1817, 5211, 9273, 9450, 6106, 8283, 7138, 7857, - 3244, 6049, 1477, 4734, 9695, 9753, 4169, 9542, 2937, - 3509, 7510, 4689, 707, 8198, 7008, 6722, 3337, 5751, - 3766, 833, 8750, 4331, 5594, 3997, 5578, 9017, 3910, - 3070, 1260, 7089, 5023, 6162, 4544, 2963, 5389, 8290, - 2433, 8470, 268, 2658, 5350, 9452, 9566, 5799, 6076, - 5093, 4524, 728, 5385, 601, 1121, 2937, 6609, 9033, - 112, 8436, 1613, 1934, 5295, 1872, 5673, 2468, 20, - 629, 8138, 8511, 4404, 5469, 9306, 3180, 1675, 8874, - 5243, 5951, 1999, 1207, 2097, 2731, 7103, 9382, 5573, - 4542, 3567, 7221, 1313, 4524, 4765, 9415, 2646, 4434, - 2523, 8400, 4785, 4828, 6799, 641, 9477, 4083, 3467, - 6803, 5831, 7241, 1931, 3753, 6755, 5101, 8779, 276, - 723, 6290, 3559, 163, 1895, 5804, 7812, 452, 8815, - 3364, 1546, 636, 1173, 4838, 5941, 3629, 5615, 1065, - 6095, 1559, 4709, 6197, 6796, 6190, 1496, 4155, 8680, - 8774, 2349, 1020, 7910, 216, 8442, 9179, 6834, 1499, - 2872, 6140, 8913, 9738, 5850, 8410, 7955, 3577, 3138, - 5103, 4605, 8243, 2613, 2619, 6014, 1536, 619, 7266, - 4067, 4761, 4808, 7604, 7128, 3593, 8227, 5574, 5711, - 3815, 6532, 6664, 4778, 9035, 2519, 6829, 8402, 9861, - 5598, 8030, 991, 5754, 1489, 8219, 1588, 7984, 3957, - 7982, 7783, 5801, 4818, 6928, 5945, 1030, 6679, 8996, - 2066, 5413, 9651, 8908, 3779, 697, 6794, 9930, 9736, - 8162, 6895, 2355, 5033, 4891, 2352, 2673, 6602, 6389, - 2319, 1938, 3169, 4827, 2997, 199, 8604, 2753, 197, - 9848, 5455, 5838, 5131, 3306, 3152, 5760, 1477, 8162, - 8523, 382, 6174, 1580, 2419, 5000, 1351, 4299, 2598, - 8064, 714, 8757, 4986, 9885, 1, 9906, 7850, 9127, - 609, 9264, 7572, 5203, 1497, 5370, 2825, 6608, 5307, - 1067, 1059, 6801, 4949, 4490, 3210, 7421, 7811, 1102, - 5058, 8487, 8843, 5590, 9083, 3587, 9580, 6809, 4814, - 5774, 5460, 5247, 1639, 4042, 1058, 5514, 2499, 4333, - 2608, 5359, 3700, 2316, 8701, 7433, 6840, 4441, 7816, - 5852, 6477, 8172, 7852, 5968, 9671, 5374, 1033, 1307, - 7427, 1879, 7842, 1991, 8425, 3031, 6842, 7640, 2472, - 5231, 4002, 6652, 3522, 211, 9391, 7872, 5134, 1378, - 6610, 2732, 1051, 7787, 2466, 1112, 7365, 9226, 5845, - 3593, 4013, 3040, 449, 9502, 9775, 5013, 72, 3221, - 1993, 2626, 656, 562, 8318, 7938, 2465, 4300, 2538, - 873, 5650, 590, 6320, 2620, 5043, 8788, 7544, 7372, - 3427, 2476, 6556, 1948, 1008, 9277, 8552, 7791, 838, - 8482, 7805, 2152, 8325, 132, 5590, 2315, 1155, 2035, - 4363, 7292, 6257, 538, 3095, 8540, 2594, 1676, 6100, - 432, 8590, 397, 6028, 5770, 4864, 259, 9108, 1343, - 3496, 3662, 6106, 9274, 2047, 2081, 3620, 438, 3494, - 55, 6088, 3113, 2526, 6605, 5078, 9048, 4362, 5299, - 8201, 7239, 2961, 6652, 4458, 3937, 4156, 4861, 4275, - 6038]), - values=tensor([4.7714e-01, 6.5835e-01, 6.6810e-01, 8.6236e-01, - 4.8150e-01, 2.9025e-01, 3.8068e-01, 1.2618e-02, - 8.9304e-01, 4.7334e-01, 3.1033e-01, 6.2877e-01, - 4.7847e-01, 6.3115e-01, 3.7549e-01, 7.1375e-01, - 7.1370e-01, 1.5617e-01, 5.9230e-01, 8.6992e-01, - 1.0824e-02, 9.5002e-01, 9.3768e-01, 3.9687e-01, - 2.4891e-01, 3.5396e-01, 7.9816e-01, 4.0629e-01, - 1.1462e-01, 9.2518e-02, 5.4557e-01, 2.8237e-01, - 2.7320e-01, 6.0124e-01, 9.2649e-01, 9.6609e-01, - 2.5218e-02, 8.7923e-01, 2.8120e-01, 8.5964e-01, - 7.5022e-01, 6.9189e-01, 8.6902e-01, 6.6175e-01, - 4.8078e-01, 1.0568e-01, 6.1760e-01, 5.1173e-01, - 7.1935e-01, 7.9758e-01, 5.1397e-01, 9.2681e-01, - 1.5881e-01, 4.0107e-01, 3.7000e-02, 4.5709e-01, - 8.7828e-01, 9.9798e-01, 2.0436e-01, 8.2405e-01, - 2.2018e-01, 8.1418e-01, 6.6142e-01, 6.5186e-01, - 1.7800e-01, 5.1282e-01, 
5.4169e-01, 5.1202e-01, - 7.2361e-01, 6.0514e-01, 5.7929e-02, 5.8574e-01, - 1.3412e-01, 2.4876e-01, 8.0263e-02, 3.0969e-01, - 7.6202e-01, 9.0548e-01, 6.8427e-01, 9.4370e-02, - 1.2529e-03, 5.6984e-01, 2.4518e-01, 5.3912e-01, - 7.6442e-01, 1.5031e-01, 7.1477e-01, 5.7099e-01, - 9.6944e-01, 7.7954e-01, 1.9253e-01, 4.3041e-01, - 4.5620e-01, 8.4686e-01, 7.6515e-01, 3.4939e-02, - 6.8882e-01, 2.9488e-01, 3.1215e-01, 5.0502e-01, - 4.1170e-01, 9.0587e-01, 2.6328e-01, 6.3719e-01, - 6.4003e-01, 1.4477e-01, 3.2101e-01, 6.3956e-01, - 5.8353e-01, 6.2460e-02, 8.7149e-01, 5.0409e-01, - 2.9944e-01, 1.5857e-01, 7.5177e-01, 3.8737e-01, - 5.8319e-01, 8.6482e-01, 5.5542e-01, 1.6361e-01, - 9.6946e-01, 7.4385e-01, 7.8405e-01, 9.1404e-01, - 3.1006e-01, 6.1069e-01, 9.8763e-01, 7.8296e-01, - 5.8330e-01, 2.2488e-01, 5.3122e-01, 9.0733e-01, - 1.0823e-01, 7.1544e-01, 5.3686e-01, 9.7645e-01, - 7.0111e-01, 8.3358e-01, 6.7151e-01, 4.5959e-01, - 1.8565e-01, 7.9015e-01, 1.8143e-02, 5.2429e-01, - 9.8713e-01, 7.8291e-02, 3.2014e-01, 7.0142e-01, - 3.2823e-02, 2.0523e-01, 2.8556e-01, 6.5603e-01, - 6.6879e-01, 1.8088e-01, 6.8243e-01, 8.0618e-01, - 5.9647e-01, 2.7644e-01, 6.0267e-01, 9.4412e-01, - 9.6862e-01, 4.6876e-01, 9.2399e-01, 8.4139e-02, - 1.1788e-01, 6.5004e-01, 4.1362e-01, 6.3517e-01, - 9.1004e-01, 8.6067e-01, 3.3935e-01, 9.9589e-01, - 6.5934e-01, 9.0696e-01, 3.5917e-01, 3.4094e-01, - 1.2945e-01, 8.7930e-01, 5.9039e-01, 9.4529e-01, - 5.8159e-01, 4.0632e-01, 3.0761e-02, 3.3183e-01, - 8.7530e-01, 1.2750e-01, 4.8617e-01, 9.8340e-01, - 9.7630e-02, 3.4204e-01, 4.1919e-01, 9.3104e-01, - 8.6768e-01, 7.0351e-02, 9.3605e-01, 3.1020e-01, - 8.5241e-01, 8.9140e-01, 9.8230e-01, 1.2324e-01, - 3.6008e-01, 2.9767e-01, 9.7191e-01, 1.9919e-03, - 2.2594e-01, 6.6331e-02, 1.2033e-02, 9.2786e-01, - 5.1123e-01, 8.4921e-01, 9.4593e-01, 1.5175e-01, - 9.6212e-01, 4.9790e-01, 3.6524e-01, 6.4334e-01, - 8.5222e-01, 8.3525e-01, 4.4515e-01, 9.2110e-01, - 6.7497e-01, 9.1729e-01, 9.5976e-01, 3.4302e-01, - 1.5117e-01, 9.0576e-01, 2.9185e-01, 3.2189e-01, - 6.6528e-01, 4.5914e-01, 8.8528e-01, 9.1793e-01, - 5.4030e-01, 5.4753e-01, 1.9790e-01, 5.5714e-01, - 3.5467e-01, 4.9917e-01, 5.8322e-01, 5.3679e-01, - 6.0002e-01, 2.0784e-01, 3.4511e-01, 2.2987e-01, - 7.2294e-01, 4.0254e-01, 2.7296e-01, 3.4605e-02, - 8.6606e-01, 1.7876e-01, 9.4357e-01, 5.2248e-01, - 7.6849e-01, 7.7606e-01, 3.8884e-01, 3.4760e-01, - 3.4674e-01, 7.9396e-01, 9.2260e-01, 7.9808e-01, - 7.3693e-01, 5.4535e-01, 2.7120e-01, 5.2527e-01, - 8.1707e-01, 9.8153e-01, 4.7831e-02, 9.0352e-01, - 2.7109e-01, 3.6352e-01, 3.9784e-01, 5.4847e-01, - 2.2590e-01, 7.0093e-01, 7.2317e-01, 9.8874e-01, - 2.4136e-01, 8.9288e-01, 4.3400e-01, 3.2705e-01, - 4.0069e-01, 1.8910e-01, 8.1844e-01, 7.9620e-01, - 5.6564e-01, 1.2793e-01, 2.8968e-01, 9.3505e-01, - 3.0875e-01, 7.9151e-01, 2.4407e-01, 2.0703e-02, - 9.6879e-01, 8.8887e-01, 9.7410e-01, 1.8982e-01, - 4.3029e-01, 6.1504e-02, 4.4247e-01, 7.0771e-01, - 8.5999e-01, 6.0147e-02, 1.0992e-01, 2.1045e-02, - 2.9122e-01, 5.5412e-01, 6.6382e-01, 6.2970e-01, - 9.8414e-01, 2.8818e-01, 9.9151e-02, 7.1218e-01, - 9.6381e-01, 3.1820e-01, 8.8603e-01, 2.0768e-01, - 2.5266e-01, 4.1578e-01, 4.2094e-02, 8.3200e-01, - 1.5533e-01, 2.6469e-01, 8.4578e-01, 6.7183e-01, - 9.4744e-01, 5.0742e-01, 9.2285e-01, 2.7251e-01, - 7.5077e-01, 8.9139e-01, 1.5554e-02, 3.6012e-01, - 8.4348e-01, 2.6198e-01, 8.1818e-01, 4.6823e-02, - 7.7674e-01, 3.7275e-01, 2.2973e-01, 8.8084e-01, - 9.4058e-01, 8.3742e-01, 4.1623e-01, 3.1520e-02, - 3.4727e-01, 9.5595e-01, 1.9390e-01, 6.7977e-01, - 3.6465e-01, 2.6049e-01, 
2.9796e-01, 1.1722e-01, - 3.9747e-01, 6.8602e-01, 5.6184e-01, 6.8826e-01, - 2.0046e-01, 6.7113e-01, 5.8623e-01, 4.6542e-01, - 6.7784e-01, 2.2631e-01, 4.2916e-01, 5.2430e-01, - 5.5908e-01, 8.4205e-02, 1.0124e-01, 9.4289e-01, - 5.4475e-03, 4.6761e-01, 9.1604e-01, 5.3590e-01, - 8.3150e-01, 5.3336e-01, 1.4978e-01, 5.6498e-01, - 6.6969e-02, 6.4111e-01, 9.1692e-01, 6.8961e-02, - 5.2052e-01, 6.7275e-01, 9.6648e-01, 1.0708e-02, - 3.7208e-01, 8.8149e-01, 4.5841e-01, 4.2290e-01, - 2.8680e-02, 6.1936e-01, 3.2559e-01, 4.5333e-01, - 9.4508e-01, 6.9227e-01, 5.5121e-01, 8.7142e-01, - 2.5169e-01, 2.1455e-01, 5.7989e-01, 1.4356e-01, - 8.7386e-01, 6.7318e-01, 4.6051e-01, 1.5353e-01, - 9.0016e-01, 6.2262e-01, 3.1852e-01, 7.0931e-02, - 6.4187e-01, 9.5944e-01, 8.9126e-01, 2.8364e-01, - 9.9916e-01, 1.9702e-01, 2.2416e-01, 4.3509e-01, - 1.4583e-01, 1.4836e-01, 9.0843e-01, 7.9091e-02, - 8.2524e-01, 5.2035e-01, 1.1139e-01, 1.7207e-01, - 5.1057e-01, 4.3832e-01, 5.2320e-01, 1.3588e-01, - 7.2917e-01, 9.7882e-01, 5.3764e-01, 1.1408e-01, - 9.6750e-01, 5.2236e-02, 1.7656e-01, 6.3825e-01, - 4.2668e-02, 3.1796e-01, 4.0240e-02, 1.7456e-01, - 5.1237e-01, 8.3579e-01, 6.7024e-01, 6.4843e-01, - 7.7335e-01, 5.1312e-01, 1.0212e-01, 3.8262e-01, - 4.8249e-01, 8.2486e-02, 8.1760e-01, 9.7405e-01, - 1.3984e-01, 2.0012e-01, 5.7656e-01, 1.0390e-01, - 4.9240e-01, 7.8365e-01, 9.2216e-01, 7.6854e-01, - 6.6574e-01, 8.6726e-01, 3.9857e-01, 8.9556e-01, - 5.5728e-01, 3.7013e-01, 2.4661e-01, 7.1962e-02, - 2.1412e-01, 6.4604e-01, 9.0376e-01, 7.0492e-01, - 4.2638e-01, 5.8832e-01, 3.4304e-01, 6.3964e-01, - 8.6998e-01, 2.4576e-01, 8.2567e-01, 1.0762e-01, - 3.1310e-01, 8.9782e-01, 6.0560e-01, 2.1446e-02, - 5.3497e-02, 9.6383e-01, 7.7932e-01, 2.1853e-01, - 7.3464e-01, 2.3548e-01, 9.3060e-01, 5.7317e-03, - 7.4298e-01, 5.3537e-01, 4.8560e-01, 9.6730e-01, - 7.6250e-01, 6.9670e-02, 6.0534e-01, 5.0924e-01, - 2.0278e-01, 1.0778e-01, 9.0005e-01, 1.0226e-01, - 4.6301e-01, 1.3695e-01, 4.8460e-01, 5.5213e-01, - 4.9099e-01, 6.6150e-01, 1.4123e-01, 4.8495e-02, - 3.2005e-01, 9.2497e-01, 3.4661e-01, 2.2349e-01, - 6.0811e-01, 6.4216e-01, 5.9953e-01, 4.6989e-01, - 2.0930e-01, 2.5490e-01, 3.4212e-01, 5.8880e-01, - 8.9119e-01, 9.5757e-01, 7.8986e-01, 9.1785e-01, - 8.4240e-02, 7.0146e-01, 4.2999e-01, 8.7659e-01, - 6.6600e-01, 9.3144e-01, 6.6034e-01, 2.5563e-01, - 5.4123e-01, 2.4843e-01, 4.2146e-01, 2.4451e-01, - 5.9355e-02, 7.9812e-01, 9.9665e-01, 1.2106e-01, - 4.8230e-01, 1.6177e-01, 6.8532e-01, 2.3983e-01, - 3.0260e-01, 5.4610e-01, 1.8362e-01, 2.4822e-01, - 5.2604e-01, 1.7262e-01, 9.3319e-01, 5.2875e-01, - 4.4548e-01, 9.6324e-01, 9.3822e-01, 1.4101e-01, - 2.9004e-01, 9.8271e-01, 1.8284e-01, 6.5851e-01, - 5.8992e-01, 1.9685e-01, 7.9180e-02, 4.3183e-01, - 2.5144e-01, 7.4977e-01, 5.0010e-01, 5.3582e-01, - 8.5925e-01, 5.7023e-01, 7.6546e-01, 2.7229e-01, - 6.3567e-01, 1.9853e-02, 9.8156e-01, 5.1300e-01, - 7.0410e-01, 1.9884e-01, 8.2894e-01, 6.0498e-01, - 8.3068e-01, 4.5943e-01, 9.1785e-01, 7.5169e-01, - 6.0400e-02, 6.3811e-01, 4.3791e-01, 6.6424e-01, - 2.3959e-01, 6.6513e-01, 9.6708e-01, 6.6745e-01, - 7.8435e-01, 4.7840e-01, 1.4220e-01, 8.4696e-01, - 3.9492e-01, 1.7747e-01, 1.0384e-01, 3.4803e-01, - 4.4867e-01, 2.1118e-02, 5.1986e-01, 8.6635e-03, - 2.9422e-01, 6.5401e-01, 7.4585e-01, 6.1559e-01, - 9.5539e-02, 1.5754e-01, 5.3164e-01, 1.9059e-01, - 4.7617e-01, 8.1445e-01, 3.7421e-01, 3.5266e-01, - 2.7694e-01, 1.6864e-01, 4.1843e-01, 3.8316e-01, - 6.9262e-01, 6.9038e-01, 8.3762e-01, 6.6963e-01, - 3.0783e-01, 5.9280e-02, 2.5202e-01, 8.1615e-01, - 1.1046e-02, 2.1282e-01, 
7.3311e-01, 2.8725e-01, - 7.6723e-01, 5.5518e-01, 2.1261e-01, 3.9670e-01, - 1.2823e-01, 8.5473e-01, 8.2945e-01, 6.0473e-03, - 8.1675e-01, 2.9842e-01, 2.7518e-01, 8.4889e-01, - 1.4176e-01, 2.2999e-01, 9.8103e-01, 6.1864e-01, - 1.7848e-01, 1.4930e-02, 9.0227e-01, 3.7526e-01, - 6.7379e-01, 7.4257e-01, 2.2657e-01, 1.3257e-01, - 6.5286e-02, 4.5247e-01, 1.1977e-02, 4.5326e-01, - 6.2023e-01, 2.9594e-01, 8.5625e-01, 3.2932e-01, - 8.5327e-01, 3.7474e-01, 5.2005e-01, 1.9805e-03, - 1.8896e-01, 2.2181e-01, 1.4462e-01, 2.7939e-01, - 3.3215e-01, 7.7546e-01, 9.2522e-01, 8.1760e-01, - 5.1863e-01, 7.7043e-01, 9.7478e-01, 5.0158e-01, - 4.9716e-01, 1.9079e-01, 3.1415e-01, 9.3979e-01, - 3.9996e-02, 5.6242e-01, 2.6871e-01, 3.4933e-04, - 6.5883e-01, 6.9675e-01, 4.0822e-01, 3.8498e-01, - 1.0241e-01, 8.6637e-01, 2.2285e-01, 7.4028e-01, - 1.7733e-01, 2.7094e-01, 4.5353e-01, 4.3450e-01, - 4.5140e-01, 4.4938e-01, 9.0305e-01, 6.1982e-01, - 1.4139e-01, 8.3423e-01, 9.4445e-01, 9.7407e-01, - 3.3746e-01, 1.2854e-01, 3.3241e-01, 8.0687e-01, - 4.7193e-01, 6.3117e-01, 6.1655e-01, 7.1355e-01, - 9.6168e-01, 4.7774e-01, 6.2907e-01, 8.0398e-02, - 7.1037e-01, 5.8516e-01, 8.8072e-01, 4.9747e-01, - 5.7621e-01, 5.3898e-01, 1.5911e-01, 3.2921e-01, - 3.7609e-01, 2.5010e-01, 4.9033e-01, 6.6828e-01, - 8.3216e-01, 3.2885e-01, 3.5639e-01, 6.1506e-01, - 3.9507e-01, 6.8564e-01, 9.3219e-01, 8.1971e-01, - 3.7975e-01, 6.2635e-02, 7.3499e-01, 8.3335e-01, - 9.6516e-01, 3.6389e-01, 1.4785e-01, 9.8734e-01, - 4.6517e-01, 4.7021e-01, 4.5035e-01, 8.5602e-01, - 8.8317e-01, 6.9377e-01, 1.0737e-01, 3.0491e-01, - 7.4477e-01, 2.7987e-01, 9.1324e-01, 4.3301e-01, - 1.0371e-01, 4.6262e-01, 7.3666e-01, 4.1720e-01, - 8.9850e-01, 7.7097e-01, 8.4133e-01, 9.0364e-01, - 3.7363e-01, 4.2931e-01, 4.3065e-01, 4.7899e-01, - 6.1030e-01, 9.2078e-01, 2.4479e-01, 6.3372e-01, - 7.5302e-01, 6.8533e-02, 2.4034e-01, 2.9799e-01, - 5.9541e-01, 8.0769e-01, 6.0826e-02, 4.9975e-01, - 6.7782e-01, 9.2380e-01, 6.5933e-01, 6.2628e-01, - 3.5499e-01, 7.7217e-01, 7.1395e-01, 8.3721e-01, - 8.8629e-01, 9.0689e-01, 2.0537e-01, 3.0803e-01, - 6.5047e-01, 7.1533e-01, 8.5211e-01, 6.9320e-01, - 9.0068e-01, 1.3613e-01, 7.8974e-01, 8.7644e-01, - 1.5286e-02, 5.1885e-02, 3.0124e-01, 3.2126e-02, - 8.9848e-01, 5.7828e-01, 9.6688e-01, 1.8717e-01, - 8.2536e-01, 6.1247e-01, 4.0347e-01, 4.7207e-01, - 7.2367e-01, 9.3299e-01, 4.6252e-01, 6.5511e-01, - 9.2534e-01, 7.0945e-01, 4.6262e-01, 9.6054e-01, - 9.4908e-01, 4.6484e-01, 9.9122e-01, 1.7594e-02, - 1.4339e-01, 7.1119e-02, 7.0679e-01, 7.2761e-01, - 4.9954e-01, 7.5889e-01, 1.8391e-01, 6.9856e-01, - 9.4138e-01, 9.0301e-01, 5.2853e-01, 5.0671e-01, - 8.2311e-01, 6.1262e-01, 6.5198e-01, 1.4274e-01, - 8.5886e-01, 2.5963e-02, 6.0455e-01, 9.0689e-01, - 4.6808e-02, 3.6223e-01, 3.6475e-01, 1.2150e-01, - 9.1193e-01, 9.1870e-01, 5.4367e-01, 2.4872e-01, - 8.8178e-01, 2.1879e-01, 4.8890e-01, 2.5284e-01, - 1.0187e-01, 3.9640e-02, 2.3500e-01, 1.6553e-01, - 3.0865e-01, 4.7595e-01, 2.7209e-01, 9.0120e-01, - 8.3193e-01, 1.3589e-01, 9.8747e-01, 9.9208e-01, - 3.3995e-01, 2.0806e-01, 4.9592e-02, 8.8307e-01, - 4.4688e-01, 8.3109e-01, 7.4699e-01, 8.7723e-01, - 2.2585e-01, 4.2030e-01, 2.1791e-01, 6.5667e-01, - 6.7550e-01, 8.1568e-01, 1.4914e-01, 8.1042e-01, - 2.8686e-01, 6.8659e-01, 4.8032e-01, 2.5172e-01, - 3.8307e-01, 5.3647e-01, 6.7392e-01, 3.3138e-01, - 4.2032e-02, 4.4574e-01, 1.6246e-01, 6.0810e-01, - 7.0306e-01, 2.1204e-01, 3.9409e-02, 2.2795e-01, - 8.4389e-01, 1.7474e-01, 8.9677e-01, 1.4316e-01, - 8.6058e-01, 6.1491e-01, 3.1661e-01, 9.2257e-01, - 7.4407e-01, 2.4658e-01, 
2.5910e-01, 8.2101e-03, - 8.6598e-01, 7.2740e-01, 9.0937e-01, 6.4394e-01, - 4.9736e-01, 3.6977e-01, 6.7672e-01, 3.2331e-01, - 8.7654e-01, 5.8446e-01, 8.9775e-01, 1.7963e-01, - 7.2940e-01, 6.4353e-01, 5.5434e-01, 6.6167e-01, - 6.1351e-01, 3.2832e-01, 6.7163e-01, 6.7831e-01, - 5.1237e-01, 2.1545e-01, 3.3231e-01, 2.8996e-01, - 6.5585e-01, 2.0358e-02, 7.7431e-01, 9.8544e-01, - 9.1419e-01, 9.5741e-01, 7.0935e-01, 3.3533e-01, - 8.7434e-01, 6.1952e-01, 8.9673e-01, 1.8459e-01, - 2.2639e-02, 3.3693e-02, 1.4487e-01, 2.5814e-01, - 7.7431e-01, 3.5981e-01, 6.2179e-01, 2.9769e-01, - 1.7809e-01, 9.9498e-01, 3.6807e-01, 9.3312e-01, - 9.5690e-01, 6.4950e-01, 8.9949e-02, 6.1082e-01, - 5.5730e-04, 2.8380e-01, 1.5247e-01, 2.6631e-01, - 7.0591e-01, 9.1447e-01, 5.5121e-01, 8.2357e-01, - 3.1340e-01, 9.1639e-01, 5.7718e-02, 9.6115e-01, - 9.1335e-01, 7.8484e-01, 2.8462e-01, 7.3102e-01, - 3.0921e-01, 2.4763e-01, 1.7843e-01, 6.2335e-01, - 9.5565e-01, 3.9810e-01, 4.9350e-01, 3.6155e-01, - 9.9764e-01, 2.8685e-01, 3.0377e-01, 2.8867e-01]), + col_indices=tensor([5653, 663, 2356, 6335, 1601, 9179, 5758, 4032, 1184, + 1367, 4244, 4842, 4720, 9582, 4215, 5795, 5613, 5508, + 3150, 2956, 6349, 4941, 1636, 8225, 9972, 2582, 3679, + 1135, 9620, 6084, 6291, 4048, 7001, 5472, 7361, 7937, + 5298, 6533, 2776, 1036, 1344, 6057, 6180, 9014, 5073, + 6811, 5946, 5681, 492, 615, 6472, 4769, 5564, 541, + 800, 5736, 579, 8317, 7029, 3695, 499, 9654, 3281, + 205, 9052, 6707, 6645, 6832, 4626, 4664, 2914, 7622, + 9393, 3855, 9403, 5918, 5868, 9444, 851, 6317, 57, + 1210, 2172, 6037, 9204, 3658, 7620, 6983, 3781, 1735, + 686, 9439, 6244, 8175, 2372, 965, 2150, 8571, 4157, + 2512, 9938, 4043, 8875, 882, 623, 1012, 3731, 7589, + 9758, 4803, 9290, 1234, 774, 2176, 4572, 2018, 3222, + 7583, 187, 3819, 9911, 5564, 8603, 9156, 1382, 5716, + 6346, 5522, 2563, 4347, 9272, 9447, 4471, 4638, 4983, + 9892, 6310, 7516, 7140, 6156, 6553, 5760, 3519, 546, + 4365, 2039, 9350, 4877, 5823, 286, 7838, 4403, 5744, + 777, 1260, 2101, 8748, 5207, 3240, 7189, 894, 9416, + 9973, 7357, 198, 3268, 3546, 227, 3220, 2795, 5781, + 5660, 9451, 8013, 3655, 5219, 9893, 2124, 1825, 7217, + 2907, 3224, 62, 5333, 4441, 9222, 7187, 7656, 8345, + 6235, 4925, 4392, 9403, 7654, 8355, 5586, 6058, 1290, + 4866, 5435, 1268, 4572, 9153, 7079, 1500, 588, 4650, + 8759, 9375, 1661, 8828, 6616, 8199, 6392, 6520, 9958, + 2116, 3225, 5132, 9375, 4924, 4325, 419, 1259, 5490, + 8365, 3133, 1934, 496, 7648, 3510, 5952, 6594, 9025, + 8913, 3315, 3979, 9410, 9159, 3471, 5346, 7471, 2113, + 6014, 6280, 8075, 582, 6331, 2784, 6053, 556, 989, + 6164, 8084, 411, 640, 8216, 8772, 5075, 9022, 5692, + 7825, 9613, 5866, 5376, 3288, 3147, 9301, 386, 8701, + 9166, 418, 2003, 9005, 8592, 4188, 3735, 4232, 5238, + 8853, 7246, 2646, 1343, 9472, 371, 1177, 8422, 8202, + 5353, 9648, 2908, 8320, 6076, 6771, 8304, 1075, 2469, + 1568, 9483, 3555, 5266, 3405, 9971, 7501, 9677, 5813, + 5924, 7587, 1603, 937, 3813, 1988, 637, 2885, 5831, + 8209, 1869, 5873, 3244, 9083, 4316, 1649, 3716, 7799, + 3429, 9162, 139, 9399, 4042, 4832, 2298, 2896, 1054, + 6210, 4421, 1743, 2336, 4261, 861, 3352, 4781, 2982, + 219, 1594, 6937, 1157, 7791, 8670, 8548, 3732, 1054, + 1298, 3515, 1443, 4515, 5404, 1104, 8525, 7928, 1938, + 9321, 4093, 9617, 5069, 7691, 6537, 1777, 1177, 4792, + 3990, 8613, 2141, 639, 6090, 3243, 3817, 2379, 597, + 2806, 6242, 129, 4735, 1063, 7452, 9642, 1260, 4687, + 6658, 1205, 1033, 2603, 6809, 9426, 6394, 7227, 8386, + 557, 2634, 1874, 8349, 1415, 4690, 6198, 8318, 8284, + 9176, 6704, 6584, 6885, 9606, 
1543, 7428, 2559, 2822, + 1315, 6792, 6148, 819, 6087, 4634, 2762, 1934, 3174, + 6495, 6068, 138, 6548, 3452, 8896, 8290, 5349, 4350, + 9259, 6574, 8978, 6459, 1935, 8370, 6950, 8071, 2398, + 2191, 2389, 8876, 4727, 5597, 1126, 3589, 4971, 4846, + 7348, 4101, 516, 7156, 3095, 2299, 76, 5189, 1287, + 7838, 754, 6520, 6985, 2666, 2752, 173, 2069, 2831, + 5027, 1950, 6590, 8614, 8084, 463, 9089, 1779, 3430, + 7584, 6410, 8803, 4256, 5381, 1369, 2241, 8259, 582, + 686, 2251, 1507, 161, 211, 57, 8365, 5950, 8733, + 1228, 3943, 7405, 6953, 7761, 3781, 1755, 2225, 1783, + 4413, 7802, 9456, 2432, 8496, 3459, 8182, 666, 5081, + 9711, 4750, 8408, 873, 1701, 3598, 2226, 4834, 8977, + 119, 1439, 6621, 8888, 6718, 4964, 6775, 8285, 756, + 683, 7518, 7638, 5345, 9636, 8272, 8196, 4295, 5469, + 9453, 7671, 9819, 4008, 1695, 8702, 5830, 9258, 6438, + 993, 8991, 6137, 5725, 8489, 6299, 3472, 1535, 8632, + 272, 8780, 428, 5376, 6220, 1971, 1957, 4900, 8226, + 2265, 5227, 4555, 7142, 7595, 5252, 4188, 1870, 3747, + 6218, 6943, 3294, 5178, 3131, 9628, 2154, 8397, 1648, + 9870, 9081, 2344, 6566, 3675, 241, 9683, 5548, 2030, + 2234, 794, 9927, 3501, 4995, 2027, 4834, 6504, 479, + 6498, 8648, 2353, 2297, 8318, 4623, 9046, 6743, 6207, + 8130, 1077, 656, 4641, 2263, 3756, 9177, 3120, 1083, + 8650, 1248, 6568, 6625, 7483, 1893, 6194, 8846, 7443, + 2988, 5728, 9652, 4567, 9731, 6950, 8352, 8733, 938, + 9915, 5254, 7482, 724, 5457, 2774, 5506, 2482, 1189, + 551, 4600, 2755, 6757, 1057, 9353, 4932, 8508, 5036, + 6380, 4795, 7526, 4341, 907, 8663, 6669, 4592, 373, + 6374, 4856, 4138, 2881, 1151, 2689, 4911, 1512, 2587, + 794, 786, 7277, 8732, 6720, 810, 9573, 2632, 8645, + 89, 2002, 9562, 5004, 8476, 9057, 6183, 1456, 8245, + 1499, 3084, 7040, 3182, 7421, 2467, 18, 1654, 8066, + 3892, 645, 1826, 817, 5601, 6826, 1555, 8599, 8669, + 4406, 2057, 9546, 4111, 3145, 7482, 8521, 8104, 237, + 1875, 7275, 1479, 6187, 44, 3298, 5094, 4170, 2205, + 4442, 3536, 2699, 1153, 3332, 7163, 1256, 2533, 733, + 9820, 3400, 1795, 1338, 589, 3332, 1481, 8779, 8377, + 2005, 3701, 6750, 2118, 9260, 2101, 7768, 3538, 2479, + 5907, 2574, 4797, 4045, 5900, 5465, 8521, 8466, 1697, + 6282, 1744, 352, 397, 1611, 969, 8666, 9409, 3614, + 8478, 8247, 8885, 3286, 5250, 7001, 3291, 6758, 8084, + 1510, 7677, 9604, 8489, 8033, 3291, 1326, 5168, 9323, + 8370, 8206, 1664, 6593, 8357, 8619, 1887, 4419, 5990, + 5278, 9128, 5485, 8058, 6566, 5979, 5259, 6492, 5081, + 936, 5810, 9231, 9063, 9410, 4079, 9581, 4526, 2618, + 2468, 2724, 6060, 1881, 3175, 4090, 6363, 9964, 4758, + 3486, 3202, 540, 9157, 4166, 1974, 5549, 3666, 6137, + 9665, 498, 6014, 2287, 6885, 214, 9852, 8570, 7221, + 3853, 5536, 8282, 2123, 5809, 8936, 7986, 6716, 392, + 7672, 9526, 804, 2336, 8391, 2208, 9067, 7291, 4449, + 8158, 6745, 6107, 8769, 3393, 1713, 4310, 2615, 7559, + 4259, 3361, 9493, 9346, 98, 563, 4262, 6949, 9119, + 7647, 5199, 7674, 6087, 4360, 6308, 3631, 1778, 7472, + 8582, 6581, 3300, 4228, 2667, 6631, 7759, 6925, 8155, + 5702, 2695, 9383, 27, 9479, 9275, 1062, 9608, 9103, + 7513, 9349, 1805, 2186, 8934, 7211, 8306, 3787, 426, + 7793, 8765, 7635, 8057, 8447, 3456, 7831, 5006, 7449, + 5886, 5800, 19, 5725, 37, 4869, 8698, 7941, 6598, + 1906, 369, 6708, 9109, 1374, 2105, 1459, 743, 1027, + 7047, 4135, 1279, 479, 3744, 992, 6277, 4043, 1901, + 9461, 1701, 183, 5585, 7539, 7017, 158, 3044, 4339, + 2736, 3950, 2868, 1786, 3760, 3722, 687, 8148, 3388, + 6, 185, 8988, 5275, 757, 2881, 8187, 3221, 1592, + 9830, 3889, 4101, 6097, 9491, 8094, 3893, 5702, 2525, + 4242, 6698, 2410, 
2070, 2272, 8560, 3241, 7303, 1110, + 121]), + values=tensor([4.5491e-02, 3.2468e-01, 2.9501e-01, 6.3274e-01, + 1.0864e-02, 7.2561e-01, 2.7833e-01, 8.9921e-01, + 9.5712e-01, 7.8304e-01, 7.9990e-01, 5.7548e-02, + 2.6717e-01, 8.6061e-01, 8.3671e-01, 6.9606e-01, + 5.5892e-01, 5.1427e-01, 2.7201e-01, 7.5072e-01, + 7.9922e-01, 1.5437e-01, 8.2474e-01, 9.8408e-01, + 9.0110e-01, 9.1307e-01, 7.3003e-01, 2.2108e-01, + 5.0958e-01, 3.3159e-01, 9.0686e-01, 3.0235e-01, + 2.2882e-01, 1.5485e-01, 4.1432e-01, 7.3323e-01, + 5.5252e-01, 9.6080e-01, 7.7897e-01, 4.5968e-01, + 7.1762e-02, 9.5266e-01, 2.5126e-01, 3.6906e-01, + 7.1595e-01, 4.5846e-01, 2.7393e-01, 1.8774e-01, + 2.4921e-01, 3.1854e-01, 4.8334e-01, 1.4183e-02, + 3.9405e-01, 7.8745e-01, 1.2611e-01, 4.9633e-01, + 1.5275e-01, 9.4294e-03, 7.0521e-01, 8.7227e-01, + 5.8672e-01, 9.8469e-01, 7.4888e-01, 4.3210e-01, + 6.9548e-01, 4.5036e-01, 2.1180e-01, 6.2584e-01, + 6.8274e-01, 4.4221e-02, 3.7077e-01, 2.7813e-01, + 8.0969e-01, 4.9060e-01, 3.7621e-01, 9.4320e-02, + 6.5185e-03, 3.2050e-02, 9.6548e-01, 7.9314e-01, + 5.2607e-01, 8.4014e-01, 5.1244e-01, 4.5536e-01, + 2.9243e-01, 3.4722e-01, 2.6706e-01, 2.3069e-01, + 4.0751e-01, 6.0825e-01, 7.8459e-01, 8.8005e-01, + 5.0828e-01, 1.3147e-01, 2.6742e-01, 9.5215e-01, + 6.5675e-01, 5.9280e-01, 9.7954e-01, 6.0187e-01, + 3.3183e-01, 1.0600e-02, 7.6686e-01, 4.6235e-01, + 7.2398e-01, 1.2355e-01, 8.7187e-01, 1.5465e-01, + 5.5476e-02, 4.5871e-01, 6.3394e-01, 5.6422e-01, + 1.3625e-01, 7.8116e-01, 2.6024e-01, 6.0334e-01, + 2.2029e-01, 2.8339e-01, 2.5566e-01, 7.3421e-01, + 2.2859e-01, 6.0130e-01, 3.1568e-01, 6.6567e-01, + 5.6914e-02, 2.5935e-01, 5.8822e-02, 5.2249e-01, + 8.9886e-01, 9.2019e-01, 5.6993e-02, 7.7705e-01, + 2.3500e-01, 3.8917e-01, 7.0200e-01, 2.6454e-01, + 1.4123e-01, 5.5982e-01, 6.0613e-01, 8.7605e-01, + 4.7423e-01, 2.7690e-01, 5.0264e-02, 9.8423e-01, + 8.7080e-01, 8.7532e-01, 1.5537e-03, 5.7053e-01, + 3.2342e-01, 7.0720e-01, 7.0123e-01, 6.9576e-01, + 8.8003e-01, 1.1500e-01, 5.4614e-01, 2.4413e-01, + 8.5389e-01, 5.6087e-01, 9.5104e-01, 9.3133e-01, + 2.8049e-02, 5.6062e-01, 4.8771e-01, 3.9350e-01, + 5.7535e-01, 2.0051e-01, 9.2461e-02, 3.4240e-01, + 8.6489e-01, 1.7069e-01, 1.4503e-01, 8.2908e-01, + 8.4715e-01, 6.2055e-01, 6.0250e-01, 6.9669e-03, + 6.7451e-01, 9.1211e-01, 7.7998e-01, 9.1172e-01, + 6.4672e-01, 8.2260e-01, 2.9749e-01, 3.4357e-01, + 8.8965e-01, 1.6998e-01, 4.6817e-01, 5.6114e-01, + 2.9832e-03, 3.5782e-01, 2.3354e-01, 7.0389e-01, + 8.6494e-01, 2.9523e-01, 7.1093e-01, 8.5747e-01, + 3.9862e-01, 9.9242e-02, 6.3404e-01, 1.8341e-01, + 3.8813e-01, 4.5591e-01, 9.0054e-01, 9.5016e-01, + 4.2228e-01, 1.2941e-01, 3.8481e-01, 7.4146e-01, + 5.8461e-01, 5.7050e-01, 9.4103e-01, 9.6133e-01, + 2.7176e-01, 8.4268e-01, 9.5349e-01, 9.8518e-01, + 7.0966e-01, 1.9918e-01, 8.0055e-01, 4.7484e-01, + 7.3362e-01, 6.1497e-01, 6.4193e-02, 6.3662e-01, + 2.5121e-01, 8.4678e-01, 1.6158e-01, 1.1583e-01, + 7.4799e-01, 5.4370e-02, 2.4911e-01, 4.5653e-01, + 9.0328e-01, 2.0017e-01, 1.0692e-02, 8.5215e-01, + 3.1864e-01, 5.7972e-01, 9.9903e-01, 1.1865e-01, + 6.9590e-01, 9.8482e-01, 4.6454e-01, 6.6789e-02, + 2.3784e-01, 3.8803e-01, 9.5572e-01, 6.9527e-01, + 7.7733e-01, 3.0422e-01, 6.2625e-01, 2.4065e-01, + 2.9101e-01, 4.5591e-01, 7.5354e-01, 6.1629e-02, + 4.8890e-01, 6.2485e-01, 4.0109e-03, 5.2769e-01, + 5.1763e-01, 1.8385e-01, 1.3278e-01, 8.0457e-01, + 8.4173e-01, 9.2545e-01, 1.3840e-01, 7.6490e-01, + 1.4038e-01, 5.3636e-01, 6.8826e-01, 5.8422e-01, + 5.2788e-01, 9.1865e-01, 9.1687e-02, 9.4108e-01, + 7.7907e-01, 8.5824e-01, 6.6717e-01, 
6.0324e-01, + 9.6810e-01, 8.6025e-01, 4.3579e-01, 5.0796e-01, + 7.7129e-01, 3.4580e-01, 6.2934e-01, 7.1330e-01, + 5.4797e-02, 2.5678e-01, 7.7649e-01, 1.6495e-02, + 6.2250e-01, 6.7574e-01, 1.4771e-01, 6.5970e-01, + 8.5384e-01, 1.0370e-01, 4.4441e-01, 8.2723e-01, + 6.6853e-01, 7.7504e-02, 3.9059e-01, 9.2182e-01, + 6.0726e-01, 9.3213e-01, 4.2342e-01, 1.5624e-01, + 6.0098e-02, 6.1591e-02, 5.0596e-01, 2.6916e-01, + 4.4467e-01, 1.9151e-01, 5.7240e-01, 4.4660e-02, + 2.8517e-01, 3.5291e-01, 6.3467e-01, 3.2806e-01, + 5.5092e-01, 8.7141e-01, 5.9479e-01, 1.1783e-01, + 6.7772e-01, 3.7557e-01, 2.1769e-01, 5.1208e-01, + 1.1902e-01, 1.9006e-01, 7.0631e-03, 6.8733e-01, + 6.8166e-01, 2.2002e-01, 7.1981e-01, 8.9421e-01, + 4.6344e-01, 9.9377e-01, 4.8175e-01, 6.3229e-01, + 3.8380e-01, 6.8316e-01, 5.8573e-01, 7.7837e-01, + 6.6946e-02, 2.3475e-01, 7.0140e-01, 1.2344e-01, + 2.8019e-01, 5.9542e-01, 7.5818e-01, 8.5928e-01, + 6.5466e-01, 2.9697e-01, 3.0249e-01, 5.1461e-01, + 3.0574e-02, 4.4546e-02, 5.1599e-02, 7.1790e-01, + 7.6362e-01, 2.5265e-01, 3.6333e-01, 5.7543e-01, + 4.4933e-01, 5.2750e-01, 1.8405e-01, 3.7109e-01, + 7.9021e-01, 6.3106e-01, 6.5594e-02, 5.0774e-01, + 7.9707e-01, 7.4175e-01, 3.6721e-01, 3.0570e-01, + 9.9687e-01, 3.7871e-01, 4.1780e-01, 9.4569e-01, + 7.8552e-01, 2.4122e-01, 6.0346e-01, 5.2993e-02, + 9.2558e-01, 1.4753e-01, 7.8873e-01, 3.8921e-02, + 9.4993e-01, 1.7318e-01, 1.0655e-01, 8.9241e-01, + 6.7408e-01, 3.1152e-01, 6.8847e-01, 4.4325e-01, + 4.8165e-01, 2.8106e-01, 3.5513e-01, 6.8342e-01, + 9.4058e-01, 1.0810e-01, 7.4330e-01, 9.5478e-01, + 1.4664e-01, 9.7611e-01, 4.7803e-01, 6.2009e-01, + 6.7093e-01, 4.0554e-01, 7.0217e-01, 5.0924e-01, + 4.5965e-01, 3.8968e-01, 9.5584e-01, 2.4917e-02, + 3.1405e-02, 3.1252e-01, 7.7549e-02, 9.5983e-01, + 1.6207e-01, 8.7600e-01, 5.6135e-01, 6.6565e-01, + 2.7677e-01, 9.8016e-01, 3.9823e-01, 7.3076e-01, + 6.4197e-01, 9.0002e-01, 4.2386e-01, 4.2004e-01, + 3.9384e-01, 2.3617e-01, 8.5746e-02, 4.3909e-01, + 3.1866e-01, 8.4048e-01, 3.8010e-01, 6.8652e-01, + 3.8902e-01, 7.5018e-01, 9.1396e-01, 1.6753e-01, + 8.1305e-01, 5.6153e-01, 7.6107e-01, 8.7271e-02, + 3.6435e-01, 1.5824e-01, 7.5343e-01, 1.3463e-02, + 4.7761e-01, 5.9639e-01, 4.4348e-01, 6.7500e-02, + 3.3355e-01, 8.6420e-01, 3.0451e-01, 4.0484e-01, + 6.4028e-01, 8.2717e-01, 4.6771e-01, 5.9629e-01, + 8.2755e-01, 5.9855e-01, 9.1490e-01, 3.9521e-01, + 4.4891e-02, 7.3522e-01, 8.1976e-01, 8.1029e-01, + 8.2031e-01, 2.7847e-01, 7.2509e-01, 4.1215e-01, + 9.0617e-01, 4.7701e-02, 4.0429e-01, 5.2302e-01, + 3.1622e-01, 6.8745e-01, 3.6865e-01, 7.9997e-01, + 6.0227e-01, 9.8518e-01, 3.9481e-01, 6.8483e-01, + 7.5143e-01, 6.1870e-01, 9.4705e-01, 1.8463e-01, + 7.7813e-01, 7.9055e-01, 3.9150e-01, 6.0592e-02, + 1.8376e-01, 3.3696e-01, 6.6841e-01, 1.6702e-01, + 2.8734e-01, 7.5517e-01, 3.7677e-01, 9.7449e-01, + 4.5025e-01, 2.0193e-01, 2.3506e-01, 1.5941e-02, + 9.6738e-01, 1.7205e-01, 6.2912e-01, 3.3533e-02, + 9.6599e-01, 5.5655e-01, 2.2836e-01, 3.1634e-01, + 9.4812e-03, 9.6607e-01, 4.7036e-01, 3.9668e-01, + 8.7698e-01, 8.9147e-02, 1.2127e-01, 5.5813e-01, + 1.7205e-01, 8.2425e-01, 8.7990e-01, 3.3131e-01, + 3.1468e-01, 5.9720e-01, 7.9343e-01, 3.9190e-01, + 4.2435e-01, 1.9421e-01, 4.9058e-01, 7.2547e-01, + 5.8086e-01, 7.0869e-01, 7.5320e-01, 3.4621e-01, + 5.0496e-01, 5.5857e-01, 6.6373e-01, 6.5189e-01, + 2.8694e-01, 1.2657e-01, 9.0532e-01, 9.5797e-01, + 4.0699e-01, 5.3664e-01, 2.8145e-01, 3.3471e-02, + 3.3984e-01, 7.2274e-02, 3.8278e-01, 6.3313e-02, + 1.2888e-01, 4.5359e-01, 8.4959e-01, 7.1804e-01, + 7.5488e-01, 9.4004e-01, 5.2125e-01, 
2.7131e-01, + 6.1402e-01, 3.8144e-01, 8.7560e-01, 3.2266e-01, + 5.5649e-01, 7.3627e-01, 8.5755e-01, 7.2072e-03, + 2.8846e-01, 7.2430e-01, 5.3878e-01, 8.4452e-01, + 1.3342e-01, 3.8786e-01, 6.5924e-01, 2.7442e-01, + 8.6804e-02, 7.3956e-02, 1.0735e-02, 1.9793e-01, + 4.7212e-01, 1.6133e-01, 4.7390e-01, 2.2255e-01, + 7.5177e-01, 4.2505e-01, 3.0972e-01, 3.7468e-01, + 7.6238e-01, 9.4803e-01, 2.5885e-01, 2.9590e-01, + 5.0495e-01, 8.5311e-01, 8.1344e-01, 2.3873e-02, + 3.3971e-01, 4.1176e-01, 3.5343e-01, 6.8713e-01, + 2.5518e-01, 5.5269e-01, 1.9510e-01, 8.5124e-01, + 2.5981e-01, 2.1895e-01, 4.7009e-01, 1.9285e-01, + 4.7878e-01, 3.9918e-01, 8.3453e-01, 3.3741e-01, + 6.4600e-01, 6.7534e-01, 5.9816e-01, 4.3317e-01, + 1.6662e-01, 6.1035e-01, 5.5817e-01, 6.0579e-01, + 1.3883e-02, 1.7728e-01, 8.1614e-01, 8.4365e-01, + 9.6240e-01, 7.8831e-01, 1.7662e-01, 7.5505e-01, + 3.1979e-02, 7.5072e-01, 2.0440e-01, 3.6909e-01, + 2.5393e-01, 3.8386e-01, 2.0864e-01, 1.2775e-01, + 1.9641e-02, 7.4518e-01, 4.0993e-01, 4.4977e-01, + 8.1556e-01, 1.6905e-01, 5.3044e-02, 1.8450e-01, + 8.8919e-01, 6.9877e-01, 5.3492e-01, 9.8340e-01, + 3.0043e-01, 8.6934e-01, 5.0952e-01, 3.3501e-01, + 8.6829e-01, 9.5828e-01, 2.6305e-01, 6.8996e-01, + 9.7548e-01, 5.6316e-01, 8.0327e-01, 4.6951e-01, + 8.8042e-01, 9.6984e-01, 4.4366e-01, 1.3588e-01, + 6.2216e-01, 2.6858e-02, 1.8300e-01, 1.1829e-01, + 6.6395e-01, 8.3757e-01, 3.2313e-03, 5.4541e-01, + 2.8596e-01, 9.7916e-01, 9.8076e-02, 4.9537e-01, + 7.6776e-01, 1.2633e-01, 2.9367e-01, 8.2086e-01, + 8.4752e-01, 4.9738e-01, 1.2453e-01, 8.8421e-01, + 9.3727e-01, 5.0640e-01, 5.3707e-01, 1.8069e-01, + 9.2864e-01, 8.8341e-01, 3.8319e-01, 7.4187e-01, + 3.9035e-01, 4.2286e-01, 1.2256e-01, 9.7913e-01, + 2.7306e-02, 4.3080e-01, 2.8349e-01, 8.0193e-01, + 4.5756e-01, 7.5877e-01, 6.0580e-01, 3.6874e-01, + 1.3218e-01, 8.0418e-02, 2.7395e-01, 5.6953e-01, + 9.6882e-02, 9.2899e-01, 4.1200e-01, 6.4434e-01, + 6.0126e-01, 7.2073e-01, 1.2206e-02, 6.5301e-01, + 7.5807e-01, 4.5420e-01, 9.4519e-01, 1.2548e-01, + 4.0270e-01, 7.5287e-01, 2.7328e-01, 6.7114e-01, + 4.6869e-04, 6.0847e-02, 3.8973e-01, 1.6576e-01, + 6.9844e-01, 2.5031e-01, 2.6646e-01, 7.4219e-01, + 1.9507e-01, 5.4664e-01, 4.6153e-02, 9.0518e-01, + 9.0946e-01, 8.4267e-01, 8.0420e-01, 7.2293e-01, + 3.5989e-01, 4.2914e-01, 9.9099e-01, 6.2425e-01, + 1.3521e-01, 5.7729e-01, 5.8151e-01, 2.2300e-01, + 7.6972e-01, 4.8647e-01, 9.6036e-01, 3.5618e-01, + 1.0842e-01, 6.3586e-01, 1.0170e-01, 6.7330e-01, + 8.1168e-01, 6.2387e-01, 5.7831e-01, 4.7809e-01, + 1.1553e-01, 8.0639e-01, 2.6396e-02, 9.3913e-01, + 1.0144e-01, 7.0822e-03, 9.5812e-01, 6.1419e-01, + 5.5769e-01, 9.4462e-01, 2.4523e-01, 7.7377e-01, + 1.6828e-01, 9.6151e-01, 9.8418e-01, 4.1049e-01, + 8.0509e-01, 8.5756e-01, 4.0852e-01, 1.7004e-01, + 4.2725e-01, 2.6924e-01, 2.7568e-02, 6.4782e-01, + 2.3400e-01, 6.4486e-01, 7.4743e-01, 2.3534e-01, + 7.7784e-01, 2.9787e-01, 7.0881e-01, 3.6903e-01, + 9.8723e-01, 9.5179e-01, 6.6481e-01, 7.9932e-01, + 3.4256e-01, 6.3989e-01, 9.4136e-01, 6.3103e-01, + 9.7416e-01, 7.1185e-01, 4.3842e-01, 7.0811e-01, + 6.2721e-01, 2.3714e-01, 2.9081e-01, 8.3555e-01, + 9.0083e-01, 7.3734e-01, 3.2326e-02, 8.2624e-01, + 9.9258e-01, 9.0930e-01, 6.7614e-01, 5.4796e-02, + 8.5340e-01, 9.2531e-01, 7.6230e-01, 6.7267e-01, + 7.9031e-01, 4.7174e-01, 4.1194e-01, 1.8776e-01, + 6.0910e-01, 5.2252e-01, 2.1673e-01, 1.8473e-01, + 3.5143e-01, 7.0776e-01, 1.2603e-01, 5.0802e-01, + 4.5209e-01, 4.3706e-01, 7.6797e-02, 6.1164e-01, + 7.8143e-01, 1.3925e-01, 5.7531e-02, 8.7507e-01, + 5.7001e-01, 4.3906e-01, 9.2820e-01, 
7.6700e-01, + 2.9968e-01, 5.9688e-01, 3.1468e-01, 4.2212e-01, + 7.9895e-02, 2.9011e-01, 1.8494e-01, 9.2888e-01, + 8.9319e-01, 8.2784e-01, 1.7488e-02, 8.1506e-01, + 5.8942e-01, 2.6648e-01, 7.2461e-01, 4.3524e-01, + 3.3783e-02, 3.0604e-01, 5.7524e-01, 1.2624e-01, + 1.8255e-01, 3.5454e-01, 3.6503e-01, 4.9452e-01, + 4.3380e-01, 1.0044e-01, 1.5676e-01, 7.4210e-01, + 7.7909e-01, 1.5953e-01, 9.7173e-01, 9.8386e-01, + 6.0732e-01, 4.4764e-01, 9.7740e-02, 2.5615e-01, + 4.6915e-01, 2.7954e-01, 5.9480e-01, 5.5372e-01, + 4.4142e-01, 7.4870e-01, 7.8726e-01, 5.3797e-01, + 7.5394e-01, 3.8676e-01, 9.0069e-01, 7.9215e-02, + 2.6986e-01, 7.4047e-01, 6.0732e-01, 3.5085e-01, + 9.1826e-01, 9.4896e-01, 6.8926e-03, 1.0309e-01, + 2.7614e-01, 5.7639e-01, 6.1056e-01, 6.5885e-01, + 7.8603e-01, 4.6450e-01, 7.5119e-01, 7.6419e-01, + 5.8530e-02, 5.5102e-01, 4.6821e-01, 4.6438e-01, + 3.5602e-01, 8.8841e-01, 5.1596e-01, 9.1532e-01, + 6.0969e-01, 4.8093e-01, 4.6162e-01, 9.1442e-03, + 8.7271e-01, 3.3897e-01, 1.0331e-01, 7.9221e-01, + 5.8692e-01, 6.9774e-01, 3.0172e-01, 1.0667e-01, + 1.0951e-02, 1.3612e-01, 7.8502e-02, 4.6719e-01, + 8.6153e-01, 2.2920e-01, 8.7337e-01, 2.1362e-02, + 8.6720e-01, 9.6225e-01, 3.2705e-01, 7.9640e-01, + 3.6238e-01, 3.5585e-01, 1.3643e-01, 3.7646e-01, + 5.3997e-01, 7.8911e-02, 9.4754e-01, 5.9488e-01, + 4.3521e-02, 6.4602e-01, 3.7163e-02, 1.4246e-01, + 9.2557e-01, 3.9489e-01, 9.4390e-01, 1.2061e-01, + 7.9882e-01, 5.0438e-01, 5.2005e-01, 9.3905e-01, + 4.3100e-01, 8.5312e-01, 7.6276e-01, 8.8805e-01, + 6.9230e-02, 7.5638e-01, 2.7686e-02, 6.4170e-01, + 6.8542e-01, 9.3072e-01, 9.1971e-02, 9.7074e-01, + 4.9244e-01, 9.7479e-01, 3.9805e-01, 6.5312e-01, + 2.7671e-01, 7.9289e-01, 4.7310e-01, 7.6491e-01, + 2.0056e-01, 9.8477e-01, 3.5288e-01, 1.5954e-01, + 9.1449e-01, 9.5312e-01, 6.0952e-01, 7.7001e-01, + 6.5414e-01, 3.7977e-01, 5.5246e-01, 8.1022e-01, + 4.6688e-01, 8.6118e-01, 7.9898e-01, 6.4956e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3038, 0.6445, 0.5741, ..., 0.8215, 0.9151, 0.6540]) +tensor([0.7123, 0.9016, 0.3604, ..., 0.7264, 0.3786, 0.3585]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1026,375 +1133,375 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.372447967529297 seconds +Time: 10.634347915649414 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
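The block above is the synthetic 10000x10000 CSR operand (NNZ 1000, density 1e-05) followed by the dense vector it is multiplied against. As a reading aid, here is a minimal, hypothetical reconstruction of such a run. Only the matrix = matrix.to_sparse_csr().type(torch.float32) line is quoted verbatim by the warnings in these logs; the generator below (random coordinates, uniform values from torch.rand) is an assumption consistent with the printed tensors, not the actual spmv.py source.

import time
import torch

def synthetic_csr(rows: int, density: float) -> torch.Tensor:
    # NNZ = rows^2 * density, matching the NNZ/Density lines in the logs
    # (10000^2 * 1e-05 = 1000 here).
    nnz = int(rows * rows * density)
    indices = torch.stack([torch.randint(0, rows, (nnz,)),
                           torch.randint(0, rows, (nnz,))])
    values = torch.rand(nnz)  # uniform [0, 1), like the values=tensor([...]) dumps
    matrix = torch.sparse_coo_tensor(indices, values, (rows, rows)).coalesce()
    # The line quoted by the UserWarning in these logs:
    matrix = matrix.to_sparse_csr().type(torch.float32)
    return matrix

def timed_spmv(matrix: torch.Tensor, iterations: int) -> float:
    x = torch.rand(matrix.shape[1])  # the dense vector printed after each matrix
    start = time.time()
    for _ in range(iterations):
        y = matrix @ x  # sparse CSR times dense vector
    return time.time() - start  # reported as TIME_S

Note that coalesce() merges duplicate coordinates, so the realized nnz can fall slightly below the target at higher densities; at 1e-05 collisions are negligible.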
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 641, 1820, 5085, 7616, 6287, 2843, 3768, 139, 557, - 8943, 8505, 7281, 43, 6135, 7303, 4893, 489, 615, - 6714, 3680, 855, 4855, 479, 9230, 4436, 9603, 1635, - 9190, 9268, 3004, 1125, 8864, 107, 6184, 8970, 8700, - 7610, 2464, 2526, 7595, 3071, 5215, 1177, 6775, 4184, - 7851, 6577, 5571, 7909, 9344, 735, 6183, 9381, 8186, - 7299, 7523, 9047, 1302, 3301, 6829, 1465, 8532, 8991, - 1047, 5588, 9587, 3024, 6187, 7730, 4690, 6326, 2702, - 2537, 5158, 9461, 7448, 9578, 6012, 7028, 226, 6053, - 1967, 8146, 5831, 6774, 2244, 6191, 9992, 2390, 9133, - 8890, 766, 5014, 4790, 2155, 4691, 2161, 5599, 1756, - 7675, 496, 9605, 711, 5336, 9031, 2531, 2338, 9491, - 3768, 7092, 9040, 599, 4662, 8394, 522, 7316, 1506, - 525, 4754, 5479, 3359, 6765, 8131, 5941, 6009, 2305, - 1065, 3240, 5116, 987, 2923, 7533, 799, 2666, 5958, - 7677, 3749, 6265, 1775, 7785, 8588, 7337, 5345, 1189, - 7948, 6558, 2457, 299, 6806, 3954, 5519, 1039, 5338, - 7232, 812, 6127, 3761, 4004, 6956, 9330, 4265, 8611, - 6724, 8377, 8527, 2706, 1567, 2671, 2286, 3304, 1272, - 2070, 9256, 4736, 9758, 5406, 1452, 7915, 3101, 144, - 9979, 1960, 6381, 3432, 2638, 7424, 537, 2559, 9614, - 8179, 4691, 7438, 1120, 5872, 3070, 8489, 6931, 1985, - 4925, 4797, 5226, 7081, 8212, 445, 4109, 5640, 2257, - 7467, 2012, 6933, 9812, 6077, 3165, 2359, 17, 5933, - 7004, 1444, 9368, 5351, 6245, 3660, 7222, 2545, 8008, - 2336, 3100, 3043, 8828, 7123, 7845, 9969, 4531, 3791, - 9658, 582, 9738, 7316, 8440, 1704, 64, 9634, 7709, - 5983, 9441, 9604, 5121, 141, 2351, 3725, 6220, 7490, - 9494, 5209, 7692, 9939, 632, 7847, 6751, 9374, 2312, - 8674, 2385, 1543, 8905, 8015, 222, 786, 7500, 8256, - 2521, 6696, 1643, 8098, 4270, 7100, 3669, 2113, 1313, - 8306, 6672, 298, 1769, 5946, 9397, 7730, 9951, 7831, - 3644, 512, 9197, 3290, 2307, 6307, 5995, 9826, 8701, - 1474, 5284, 3261, 2218, 2945, 4813, 3037, 7819, 5822, - 7884, 4720, 2534, 1208, 3616, 922, 2226, 4382, 3494, - 6973, 6642, 6587, 3020, 4947, 3226, 2552, 101, 2200, - 8571, 1979, 8248, 2284, 9387, 3200, 9086, 615, 4324, - 7614, 1291, 9099, 2422, 9376, 7855, 2869, 4373, 9650, - 4367, 4622, 5124, 7211, 2179, 1614, 3245, 5389, 6821, - 6300, 8718, 1035, 3582, 8257, 7452, 2297, 3762, 3338, - 2343, 6557, 3623, 5128, 4244, 4231, 1091, 2759, 1073, - 1438, 9204, 3250, 1109, 5974, 5625, 9640, 5403, 9827, - 9860, 2538, 6365, 3807, 7739, 4746, 196, 7104, 1414, - 9645, 5312, 4324, 1738, 8925, 3231, 558, 7534, 4725, - 9093, 1277, 4867, 2640, 3909, 8870, 6450, 5182, 1941, - 5107, 8388, 8527, 7970, 1975, 7722, 4808, 8077, 5290, - 5743, 6355, 7161, 573, 7648, 7139, 100, 2042, 3159, - 1046, 6596, 5452, 1650, 9645, 6382, 6862, 555, 3741, - 118, 8221, 8479, 8346, 7654, 5102, 3554, 7238, 258, - 9058, 600, 7288, 3482, 1231, 52, 6020, 6829, 8334, - 4647, 3307, 2858, 528, 4606, 6172, 3855, 4925, 5620, - 132, 8810, 4311, 4446, 8520, 9436, 6123, 4339, 1754, - 5002, 7908, 529, 6941, 1458, 1377, 7366, 3326, 2975, - 4976, 2358, 9764, 3735, 4314, 9201, 3224, 3442, 4608, - 7255, 153, 1882, 8568, 6384, 2712, 5756, 5384, 9835, - 2913, 8596, 324, 1552, 8661, 2777, 8374, 5359, 1615, - 3969, 6396, 7689, 2470, 778, 7314, 3640, 427, 3557, - 3960, 5759, 9579, 3154, 4508, 9713, 2509, 4663, 4700, - 2397, 2730, 4887, 7158, 9627, 992, 5544, 8476, 9970, - 6284, 6686, 6616, 1433, 1753, 1639, 788, 8113, 3009, - 2084, 9407, 5338, 8482, 2017, 2846, 3658, 5916, 2942, - 3321, 8671, 2490, 5057, 4147, 6392, 6458, 674, 981, - 
3352, 6130, 6143, 264, 4531, 5618, 3537, 3145, 3094, - 4051, 1817, 5211, 9273, 9450, 6106, 8283, 7138, 7857, - 3244, 6049, 1477, 4734, 9695, 9753, 4169, 9542, 2937, - 3509, 7510, 4689, 707, 8198, 7008, 6722, 3337, 5751, - 3766, 833, 8750, 4331, 5594, 3997, 5578, 9017, 3910, - 3070, 1260, 7089, 5023, 6162, 4544, 2963, 5389, 8290, - 2433, 8470, 268, 2658, 5350, 9452, 9566, 5799, 6076, - 5093, 4524, 728, 5385, 601, 1121, 2937, 6609, 9033, - 112, 8436, 1613, 1934, 5295, 1872, 5673, 2468, 20, - 629, 8138, 8511, 4404, 5469, 9306, 3180, 1675, 8874, - 5243, 5951, 1999, 1207, 2097, 2731, 7103, 9382, 5573, - 4542, 3567, 7221, 1313, 4524, 4765, 9415, 2646, 4434, - 2523, 8400, 4785, 4828, 6799, 641, 9477, 4083, 3467, - 6803, 5831, 7241, 1931, 3753, 6755, 5101, 8779, 276, - 723, 6290, 3559, 163, 1895, 5804, 7812, 452, 8815, - 3364, 1546, 636, 1173, 4838, 5941, 3629, 5615, 1065, - 6095, 1559, 4709, 6197, 6796, 6190, 1496, 4155, 8680, - 8774, 2349, 1020, 7910, 216, 8442, 9179, 6834, 1499, - 2872, 6140, 8913, 9738, 5850, 8410, 7955, 3577, 3138, - 5103, 4605, 8243, 2613, 2619, 6014, 1536, 619, 7266, - 4067, 4761, 4808, 7604, 7128, 3593, 8227, 5574, 5711, - 3815, 6532, 6664, 4778, 9035, 2519, 6829, 8402, 9861, - 5598, 8030, 991, 5754, 1489, 8219, 1588, 7984, 3957, - 7982, 7783, 5801, 4818, 6928, 5945, 1030, 6679, 8996, - 2066, 5413, 9651, 8908, 3779, 697, 6794, 9930, 9736, - 8162, 6895, 2355, 5033, 4891, 2352, 2673, 6602, 6389, - 2319, 1938, 3169, 4827, 2997, 199, 8604, 2753, 197, - 9848, 5455, 5838, 5131, 3306, 3152, 5760, 1477, 8162, - 8523, 382, 6174, 1580, 2419, 5000, 1351, 4299, 2598, - 8064, 714, 8757, 4986, 9885, 1, 9906, 7850, 9127, - 609, 9264, 7572, 5203, 1497, 5370, 2825, 6608, 5307, - 1067, 1059, 6801, 4949, 4490, 3210, 7421, 7811, 1102, - 5058, 8487, 8843, 5590, 9083, 3587, 9580, 6809, 4814, - 5774, 5460, 5247, 1639, 4042, 1058, 5514, 2499, 4333, - 2608, 5359, 3700, 2316, 8701, 7433, 6840, 4441, 7816, - 5852, 6477, 8172, 7852, 5968, 9671, 5374, 1033, 1307, - 7427, 1879, 7842, 1991, 8425, 3031, 6842, 7640, 2472, - 5231, 4002, 6652, 3522, 211, 9391, 7872, 5134, 1378, - 6610, 2732, 1051, 7787, 2466, 1112, 7365, 9226, 5845, - 3593, 4013, 3040, 449, 9502, 9775, 5013, 72, 3221, - 1993, 2626, 656, 562, 8318, 7938, 2465, 4300, 2538, - 873, 5650, 590, 6320, 2620, 5043, 8788, 7544, 7372, - 3427, 2476, 6556, 1948, 1008, 9277, 8552, 7791, 838, - 8482, 7805, 2152, 8325, 132, 5590, 2315, 1155, 2035, - 4363, 7292, 6257, 538, 3095, 8540, 2594, 1676, 6100, - 432, 8590, 397, 6028, 5770, 4864, 259, 9108, 1343, - 3496, 3662, 6106, 9274, 2047, 2081, 3620, 438, 3494, - 55, 6088, 3113, 2526, 6605, 5078, 9048, 4362, 5299, - 8201, 7239, 2961, 6652, 4458, 3937, 4156, 4861, 4275, - 6038]), - values=tensor([4.7714e-01, 6.5835e-01, 6.6810e-01, 8.6236e-01, - 4.8150e-01, 2.9025e-01, 3.8068e-01, 1.2618e-02, - 8.9304e-01, 4.7334e-01, 3.1033e-01, 6.2877e-01, - 4.7847e-01, 6.3115e-01, 3.7549e-01, 7.1375e-01, - 7.1370e-01, 1.5617e-01, 5.9230e-01, 8.6992e-01, - 1.0824e-02, 9.5002e-01, 9.3768e-01, 3.9687e-01, - 2.4891e-01, 3.5396e-01, 7.9816e-01, 4.0629e-01, - 1.1462e-01, 9.2518e-02, 5.4557e-01, 2.8237e-01, - 2.7320e-01, 6.0124e-01, 9.2649e-01, 9.6609e-01, - 2.5218e-02, 8.7923e-01, 2.8120e-01, 8.5964e-01, - 7.5022e-01, 6.9189e-01, 8.6902e-01, 6.6175e-01, - 4.8078e-01, 1.0568e-01, 6.1760e-01, 5.1173e-01, - 7.1935e-01, 7.9758e-01, 5.1397e-01, 9.2681e-01, - 1.5881e-01, 4.0107e-01, 3.7000e-02, 4.5709e-01, - 8.7828e-01, 9.9798e-01, 2.0436e-01, 8.2405e-01, - 2.2018e-01, 8.1418e-01, 6.6142e-01, 6.5186e-01, - 1.7800e-01, 5.1282e-01, 
5.4169e-01, 5.1202e-01, - 7.2361e-01, 6.0514e-01, 5.7929e-02, 5.8574e-01, - 1.3412e-01, 2.4876e-01, 8.0263e-02, 3.0969e-01, - 7.6202e-01, 9.0548e-01, 6.8427e-01, 9.4370e-02, - 1.2529e-03, 5.6984e-01, 2.4518e-01, 5.3912e-01, - 7.6442e-01, 1.5031e-01, 7.1477e-01, 5.7099e-01, - 9.6944e-01, 7.7954e-01, 1.9253e-01, 4.3041e-01, - 4.5620e-01, 8.4686e-01, 7.6515e-01, 3.4939e-02, - 6.8882e-01, 2.9488e-01, 3.1215e-01, 5.0502e-01, - 4.1170e-01, 9.0587e-01, 2.6328e-01, 6.3719e-01, - 6.4003e-01, 1.4477e-01, 3.2101e-01, 6.3956e-01, - 5.8353e-01, 6.2460e-02, 8.7149e-01, 5.0409e-01, - 2.9944e-01, 1.5857e-01, 7.5177e-01, 3.8737e-01, - 5.8319e-01, 8.6482e-01, 5.5542e-01, 1.6361e-01, - 9.6946e-01, 7.4385e-01, 7.8405e-01, 9.1404e-01, - 3.1006e-01, 6.1069e-01, 9.8763e-01, 7.8296e-01, - 5.8330e-01, 2.2488e-01, 5.3122e-01, 9.0733e-01, - 1.0823e-01, 7.1544e-01, 5.3686e-01, 9.7645e-01, - 7.0111e-01, 8.3358e-01, 6.7151e-01, 4.5959e-01, - 1.8565e-01, 7.9015e-01, 1.8143e-02, 5.2429e-01, - 9.8713e-01, 7.8291e-02, 3.2014e-01, 7.0142e-01, - 3.2823e-02, 2.0523e-01, 2.8556e-01, 6.5603e-01, - 6.6879e-01, 1.8088e-01, 6.8243e-01, 8.0618e-01, - 5.9647e-01, 2.7644e-01, 6.0267e-01, 9.4412e-01, - 9.6862e-01, 4.6876e-01, 9.2399e-01, 8.4139e-02, - 1.1788e-01, 6.5004e-01, 4.1362e-01, 6.3517e-01, - 9.1004e-01, 8.6067e-01, 3.3935e-01, 9.9589e-01, - 6.5934e-01, 9.0696e-01, 3.5917e-01, 3.4094e-01, - 1.2945e-01, 8.7930e-01, 5.9039e-01, 9.4529e-01, - 5.8159e-01, 4.0632e-01, 3.0761e-02, 3.3183e-01, - 8.7530e-01, 1.2750e-01, 4.8617e-01, 9.8340e-01, - 9.7630e-02, 3.4204e-01, 4.1919e-01, 9.3104e-01, - 8.6768e-01, 7.0351e-02, 9.3605e-01, 3.1020e-01, - 8.5241e-01, 8.9140e-01, 9.8230e-01, 1.2324e-01, - 3.6008e-01, 2.9767e-01, 9.7191e-01, 1.9919e-03, - 2.2594e-01, 6.6331e-02, 1.2033e-02, 9.2786e-01, - 5.1123e-01, 8.4921e-01, 9.4593e-01, 1.5175e-01, - 9.6212e-01, 4.9790e-01, 3.6524e-01, 6.4334e-01, - 8.5222e-01, 8.3525e-01, 4.4515e-01, 9.2110e-01, - 6.7497e-01, 9.1729e-01, 9.5976e-01, 3.4302e-01, - 1.5117e-01, 9.0576e-01, 2.9185e-01, 3.2189e-01, - 6.6528e-01, 4.5914e-01, 8.8528e-01, 9.1793e-01, - 5.4030e-01, 5.4753e-01, 1.9790e-01, 5.5714e-01, - 3.5467e-01, 4.9917e-01, 5.8322e-01, 5.3679e-01, - 6.0002e-01, 2.0784e-01, 3.4511e-01, 2.2987e-01, - 7.2294e-01, 4.0254e-01, 2.7296e-01, 3.4605e-02, - 8.6606e-01, 1.7876e-01, 9.4357e-01, 5.2248e-01, - 7.6849e-01, 7.7606e-01, 3.8884e-01, 3.4760e-01, - 3.4674e-01, 7.9396e-01, 9.2260e-01, 7.9808e-01, - 7.3693e-01, 5.4535e-01, 2.7120e-01, 5.2527e-01, - 8.1707e-01, 9.8153e-01, 4.7831e-02, 9.0352e-01, - 2.7109e-01, 3.6352e-01, 3.9784e-01, 5.4847e-01, - 2.2590e-01, 7.0093e-01, 7.2317e-01, 9.8874e-01, - 2.4136e-01, 8.9288e-01, 4.3400e-01, 3.2705e-01, - 4.0069e-01, 1.8910e-01, 8.1844e-01, 7.9620e-01, - 5.6564e-01, 1.2793e-01, 2.8968e-01, 9.3505e-01, - 3.0875e-01, 7.9151e-01, 2.4407e-01, 2.0703e-02, - 9.6879e-01, 8.8887e-01, 9.7410e-01, 1.8982e-01, - 4.3029e-01, 6.1504e-02, 4.4247e-01, 7.0771e-01, - 8.5999e-01, 6.0147e-02, 1.0992e-01, 2.1045e-02, - 2.9122e-01, 5.5412e-01, 6.6382e-01, 6.2970e-01, - 9.8414e-01, 2.8818e-01, 9.9151e-02, 7.1218e-01, - 9.6381e-01, 3.1820e-01, 8.8603e-01, 2.0768e-01, - 2.5266e-01, 4.1578e-01, 4.2094e-02, 8.3200e-01, - 1.5533e-01, 2.6469e-01, 8.4578e-01, 6.7183e-01, - 9.4744e-01, 5.0742e-01, 9.2285e-01, 2.7251e-01, - 7.5077e-01, 8.9139e-01, 1.5554e-02, 3.6012e-01, - 8.4348e-01, 2.6198e-01, 8.1818e-01, 4.6823e-02, - 7.7674e-01, 3.7275e-01, 2.2973e-01, 8.8084e-01, - 9.4058e-01, 8.3742e-01, 4.1623e-01, 3.1520e-02, - 3.4727e-01, 9.5595e-01, 1.9390e-01, 6.7977e-01, - 3.6465e-01, 2.6049e-01, 
2.9796e-01, 1.1722e-01, - 3.9747e-01, 6.8602e-01, 5.6184e-01, 6.8826e-01, - 2.0046e-01, 6.7113e-01, 5.8623e-01, 4.6542e-01, - 6.7784e-01, 2.2631e-01, 4.2916e-01, 5.2430e-01, - 5.5908e-01, 8.4205e-02, 1.0124e-01, 9.4289e-01, - 5.4475e-03, 4.6761e-01, 9.1604e-01, 5.3590e-01, - 8.3150e-01, 5.3336e-01, 1.4978e-01, 5.6498e-01, - 6.6969e-02, 6.4111e-01, 9.1692e-01, 6.8961e-02, - 5.2052e-01, 6.7275e-01, 9.6648e-01, 1.0708e-02, - 3.7208e-01, 8.8149e-01, 4.5841e-01, 4.2290e-01, - 2.8680e-02, 6.1936e-01, 3.2559e-01, 4.5333e-01, - 9.4508e-01, 6.9227e-01, 5.5121e-01, 8.7142e-01, - 2.5169e-01, 2.1455e-01, 5.7989e-01, 1.4356e-01, - 8.7386e-01, 6.7318e-01, 4.6051e-01, 1.5353e-01, - 9.0016e-01, 6.2262e-01, 3.1852e-01, 7.0931e-02, - 6.4187e-01, 9.5944e-01, 8.9126e-01, 2.8364e-01, - 9.9916e-01, 1.9702e-01, 2.2416e-01, 4.3509e-01, - 1.4583e-01, 1.4836e-01, 9.0843e-01, 7.9091e-02, - 8.2524e-01, 5.2035e-01, 1.1139e-01, 1.7207e-01, - 5.1057e-01, 4.3832e-01, 5.2320e-01, 1.3588e-01, - 7.2917e-01, 9.7882e-01, 5.3764e-01, 1.1408e-01, - 9.6750e-01, 5.2236e-02, 1.7656e-01, 6.3825e-01, - 4.2668e-02, 3.1796e-01, 4.0240e-02, 1.7456e-01, - 5.1237e-01, 8.3579e-01, 6.7024e-01, 6.4843e-01, - 7.7335e-01, 5.1312e-01, 1.0212e-01, 3.8262e-01, - 4.8249e-01, 8.2486e-02, 8.1760e-01, 9.7405e-01, - 1.3984e-01, 2.0012e-01, 5.7656e-01, 1.0390e-01, - 4.9240e-01, 7.8365e-01, 9.2216e-01, 7.6854e-01, - 6.6574e-01, 8.6726e-01, 3.9857e-01, 8.9556e-01, - 5.5728e-01, 3.7013e-01, 2.4661e-01, 7.1962e-02, - 2.1412e-01, 6.4604e-01, 9.0376e-01, 7.0492e-01, - 4.2638e-01, 5.8832e-01, 3.4304e-01, 6.3964e-01, - 8.6998e-01, 2.4576e-01, 8.2567e-01, 1.0762e-01, - 3.1310e-01, 8.9782e-01, 6.0560e-01, 2.1446e-02, - 5.3497e-02, 9.6383e-01, 7.7932e-01, 2.1853e-01, - 7.3464e-01, 2.3548e-01, 9.3060e-01, 5.7317e-03, - 7.4298e-01, 5.3537e-01, 4.8560e-01, 9.6730e-01, - 7.6250e-01, 6.9670e-02, 6.0534e-01, 5.0924e-01, - 2.0278e-01, 1.0778e-01, 9.0005e-01, 1.0226e-01, - 4.6301e-01, 1.3695e-01, 4.8460e-01, 5.5213e-01, - 4.9099e-01, 6.6150e-01, 1.4123e-01, 4.8495e-02, - 3.2005e-01, 9.2497e-01, 3.4661e-01, 2.2349e-01, - 6.0811e-01, 6.4216e-01, 5.9953e-01, 4.6989e-01, - 2.0930e-01, 2.5490e-01, 3.4212e-01, 5.8880e-01, - 8.9119e-01, 9.5757e-01, 7.8986e-01, 9.1785e-01, - 8.4240e-02, 7.0146e-01, 4.2999e-01, 8.7659e-01, - 6.6600e-01, 9.3144e-01, 6.6034e-01, 2.5563e-01, - 5.4123e-01, 2.4843e-01, 4.2146e-01, 2.4451e-01, - 5.9355e-02, 7.9812e-01, 9.9665e-01, 1.2106e-01, - 4.8230e-01, 1.6177e-01, 6.8532e-01, 2.3983e-01, - 3.0260e-01, 5.4610e-01, 1.8362e-01, 2.4822e-01, - 5.2604e-01, 1.7262e-01, 9.3319e-01, 5.2875e-01, - 4.4548e-01, 9.6324e-01, 9.3822e-01, 1.4101e-01, - 2.9004e-01, 9.8271e-01, 1.8284e-01, 6.5851e-01, - 5.8992e-01, 1.9685e-01, 7.9180e-02, 4.3183e-01, - 2.5144e-01, 7.4977e-01, 5.0010e-01, 5.3582e-01, - 8.5925e-01, 5.7023e-01, 7.6546e-01, 2.7229e-01, - 6.3567e-01, 1.9853e-02, 9.8156e-01, 5.1300e-01, - 7.0410e-01, 1.9884e-01, 8.2894e-01, 6.0498e-01, - 8.3068e-01, 4.5943e-01, 9.1785e-01, 7.5169e-01, - 6.0400e-02, 6.3811e-01, 4.3791e-01, 6.6424e-01, - 2.3959e-01, 6.6513e-01, 9.6708e-01, 6.6745e-01, - 7.8435e-01, 4.7840e-01, 1.4220e-01, 8.4696e-01, - 3.9492e-01, 1.7747e-01, 1.0384e-01, 3.4803e-01, - 4.4867e-01, 2.1118e-02, 5.1986e-01, 8.6635e-03, - 2.9422e-01, 6.5401e-01, 7.4585e-01, 6.1559e-01, - 9.5539e-02, 1.5754e-01, 5.3164e-01, 1.9059e-01, - 4.7617e-01, 8.1445e-01, 3.7421e-01, 3.5266e-01, - 2.7694e-01, 1.6864e-01, 4.1843e-01, 3.8316e-01, - 6.9262e-01, 6.9038e-01, 8.3762e-01, 6.6963e-01, - 3.0783e-01, 5.9280e-02, 2.5202e-01, 8.1615e-01, - 1.1046e-02, 2.1282e-01, 
7.3311e-01, 2.8725e-01, - 7.6723e-01, 5.5518e-01, 2.1261e-01, 3.9670e-01, - 1.2823e-01, 8.5473e-01, 8.2945e-01, 6.0473e-03, - 8.1675e-01, 2.9842e-01, 2.7518e-01, 8.4889e-01, - 1.4176e-01, 2.2999e-01, 9.8103e-01, 6.1864e-01, - 1.7848e-01, 1.4930e-02, 9.0227e-01, 3.7526e-01, - 6.7379e-01, 7.4257e-01, 2.2657e-01, 1.3257e-01, - 6.5286e-02, 4.5247e-01, 1.1977e-02, 4.5326e-01, - 6.2023e-01, 2.9594e-01, 8.5625e-01, 3.2932e-01, - 8.5327e-01, 3.7474e-01, 5.2005e-01, 1.9805e-03, - 1.8896e-01, 2.2181e-01, 1.4462e-01, 2.7939e-01, - 3.3215e-01, 7.7546e-01, 9.2522e-01, 8.1760e-01, - 5.1863e-01, 7.7043e-01, 9.7478e-01, 5.0158e-01, - 4.9716e-01, 1.9079e-01, 3.1415e-01, 9.3979e-01, - 3.9996e-02, 5.6242e-01, 2.6871e-01, 3.4933e-04, - 6.5883e-01, 6.9675e-01, 4.0822e-01, 3.8498e-01, - 1.0241e-01, 8.6637e-01, 2.2285e-01, 7.4028e-01, - 1.7733e-01, 2.7094e-01, 4.5353e-01, 4.3450e-01, - 4.5140e-01, 4.4938e-01, 9.0305e-01, 6.1982e-01, - 1.4139e-01, 8.3423e-01, 9.4445e-01, 9.7407e-01, - 3.3746e-01, 1.2854e-01, 3.3241e-01, 8.0687e-01, - 4.7193e-01, 6.3117e-01, 6.1655e-01, 7.1355e-01, - 9.6168e-01, 4.7774e-01, 6.2907e-01, 8.0398e-02, - 7.1037e-01, 5.8516e-01, 8.8072e-01, 4.9747e-01, - 5.7621e-01, 5.3898e-01, 1.5911e-01, 3.2921e-01, - 3.7609e-01, 2.5010e-01, 4.9033e-01, 6.6828e-01, - 8.3216e-01, 3.2885e-01, 3.5639e-01, 6.1506e-01, - 3.9507e-01, 6.8564e-01, 9.3219e-01, 8.1971e-01, - 3.7975e-01, 6.2635e-02, 7.3499e-01, 8.3335e-01, - 9.6516e-01, 3.6389e-01, 1.4785e-01, 9.8734e-01, - 4.6517e-01, 4.7021e-01, 4.5035e-01, 8.5602e-01, - 8.8317e-01, 6.9377e-01, 1.0737e-01, 3.0491e-01, - 7.4477e-01, 2.7987e-01, 9.1324e-01, 4.3301e-01, - 1.0371e-01, 4.6262e-01, 7.3666e-01, 4.1720e-01, - 8.9850e-01, 7.7097e-01, 8.4133e-01, 9.0364e-01, - 3.7363e-01, 4.2931e-01, 4.3065e-01, 4.7899e-01, - 6.1030e-01, 9.2078e-01, 2.4479e-01, 6.3372e-01, - 7.5302e-01, 6.8533e-02, 2.4034e-01, 2.9799e-01, - 5.9541e-01, 8.0769e-01, 6.0826e-02, 4.9975e-01, - 6.7782e-01, 9.2380e-01, 6.5933e-01, 6.2628e-01, - 3.5499e-01, 7.7217e-01, 7.1395e-01, 8.3721e-01, - 8.8629e-01, 9.0689e-01, 2.0537e-01, 3.0803e-01, - 6.5047e-01, 7.1533e-01, 8.5211e-01, 6.9320e-01, - 9.0068e-01, 1.3613e-01, 7.8974e-01, 8.7644e-01, - 1.5286e-02, 5.1885e-02, 3.0124e-01, 3.2126e-02, - 8.9848e-01, 5.7828e-01, 9.6688e-01, 1.8717e-01, - 8.2536e-01, 6.1247e-01, 4.0347e-01, 4.7207e-01, - 7.2367e-01, 9.3299e-01, 4.6252e-01, 6.5511e-01, - 9.2534e-01, 7.0945e-01, 4.6262e-01, 9.6054e-01, - 9.4908e-01, 4.6484e-01, 9.9122e-01, 1.7594e-02, - 1.4339e-01, 7.1119e-02, 7.0679e-01, 7.2761e-01, - 4.9954e-01, 7.5889e-01, 1.8391e-01, 6.9856e-01, - 9.4138e-01, 9.0301e-01, 5.2853e-01, 5.0671e-01, - 8.2311e-01, 6.1262e-01, 6.5198e-01, 1.4274e-01, - 8.5886e-01, 2.5963e-02, 6.0455e-01, 9.0689e-01, - 4.6808e-02, 3.6223e-01, 3.6475e-01, 1.2150e-01, - 9.1193e-01, 9.1870e-01, 5.4367e-01, 2.4872e-01, - 8.8178e-01, 2.1879e-01, 4.8890e-01, 2.5284e-01, - 1.0187e-01, 3.9640e-02, 2.3500e-01, 1.6553e-01, - 3.0865e-01, 4.7595e-01, 2.7209e-01, 9.0120e-01, - 8.3193e-01, 1.3589e-01, 9.8747e-01, 9.9208e-01, - 3.3995e-01, 2.0806e-01, 4.9592e-02, 8.8307e-01, - 4.4688e-01, 8.3109e-01, 7.4699e-01, 8.7723e-01, - 2.2585e-01, 4.2030e-01, 2.1791e-01, 6.5667e-01, - 6.7550e-01, 8.1568e-01, 1.4914e-01, 8.1042e-01, - 2.8686e-01, 6.8659e-01, 4.8032e-01, 2.5172e-01, - 3.8307e-01, 5.3647e-01, 6.7392e-01, 3.3138e-01, - 4.2032e-02, 4.4574e-01, 1.6246e-01, 6.0810e-01, - 7.0306e-01, 2.1204e-01, 3.9409e-02, 2.2795e-01, - 8.4389e-01, 1.7474e-01, 8.9677e-01, 1.4316e-01, - 8.6058e-01, 6.1491e-01, 3.1661e-01, 9.2257e-01, - 7.4407e-01, 2.4658e-01, 
2.5910e-01, 8.2101e-03, - 8.6598e-01, 7.2740e-01, 9.0937e-01, 6.4394e-01, - 4.9736e-01, 3.6977e-01, 6.7672e-01, 3.2331e-01, - 8.7654e-01, 5.8446e-01, 8.9775e-01, 1.7963e-01, - 7.2940e-01, 6.4353e-01, 5.5434e-01, 6.6167e-01, - 6.1351e-01, 3.2832e-01, 6.7163e-01, 6.7831e-01, - 5.1237e-01, 2.1545e-01, 3.3231e-01, 2.8996e-01, - 6.5585e-01, 2.0358e-02, 7.7431e-01, 9.8544e-01, - 9.1419e-01, 9.5741e-01, 7.0935e-01, 3.3533e-01, - 8.7434e-01, 6.1952e-01, 8.9673e-01, 1.8459e-01, - 2.2639e-02, 3.3693e-02, 1.4487e-01, 2.5814e-01, - 7.7431e-01, 3.5981e-01, 6.2179e-01, 2.9769e-01, - 1.7809e-01, 9.9498e-01, 3.6807e-01, 9.3312e-01, - 9.5690e-01, 6.4950e-01, 8.9949e-02, 6.1082e-01, - 5.5730e-04, 2.8380e-01, 1.5247e-01, 2.6631e-01, - 7.0591e-01, 9.1447e-01, 5.5121e-01, 8.2357e-01, - 3.1340e-01, 9.1639e-01, 5.7718e-02, 9.6115e-01, - 9.1335e-01, 7.8484e-01, 2.8462e-01, 7.3102e-01, - 3.0921e-01, 2.4763e-01, 1.7843e-01, 6.2335e-01, - 9.5565e-01, 3.9810e-01, 4.9350e-01, 3.6155e-01, - 9.9764e-01, 2.8685e-01, 3.0377e-01, 2.8867e-01]), + col_indices=tensor([5653, 663, 2356, 6335, 1601, 9179, 5758, 4032, 1184, + 1367, 4244, 4842, 4720, 9582, 4215, 5795, 5613, 5508, + 3150, 2956, 6349, 4941, 1636, 8225, 9972, 2582, 3679, + 1135, 9620, 6084, 6291, 4048, 7001, 5472, 7361, 7937, + 5298, 6533, 2776, 1036, 1344, 6057, 6180, 9014, 5073, + 6811, 5946, 5681, 492, 615, 6472, 4769, 5564, 541, + 800, 5736, 579, 8317, 7029, 3695, 499, 9654, 3281, + 205, 9052, 6707, 6645, 6832, 4626, 4664, 2914, 7622, + 9393, 3855, 9403, 5918, 5868, 9444, 851, 6317, 57, + 1210, 2172, 6037, 9204, 3658, 7620, 6983, 3781, 1735, + 686, 9439, 6244, 8175, 2372, 965, 2150, 8571, 4157, + 2512, 9938, 4043, 8875, 882, 623, 1012, 3731, 7589, + 9758, 4803, 9290, 1234, 774, 2176, 4572, 2018, 3222, + 7583, 187, 3819, 9911, 5564, 8603, 9156, 1382, 5716, + 6346, 5522, 2563, 4347, 9272, 9447, 4471, 4638, 4983, + 9892, 6310, 7516, 7140, 6156, 6553, 5760, 3519, 546, + 4365, 2039, 9350, 4877, 5823, 286, 7838, 4403, 5744, + 777, 1260, 2101, 8748, 5207, 3240, 7189, 894, 9416, + 9973, 7357, 198, 3268, 3546, 227, 3220, 2795, 5781, + 5660, 9451, 8013, 3655, 5219, 9893, 2124, 1825, 7217, + 2907, 3224, 62, 5333, 4441, 9222, 7187, 7656, 8345, + 6235, 4925, 4392, 9403, 7654, 8355, 5586, 6058, 1290, + 4866, 5435, 1268, 4572, 9153, 7079, 1500, 588, 4650, + 8759, 9375, 1661, 8828, 6616, 8199, 6392, 6520, 9958, + 2116, 3225, 5132, 9375, 4924, 4325, 419, 1259, 5490, + 8365, 3133, 1934, 496, 7648, 3510, 5952, 6594, 9025, + 8913, 3315, 3979, 9410, 9159, 3471, 5346, 7471, 2113, + 6014, 6280, 8075, 582, 6331, 2784, 6053, 556, 989, + 6164, 8084, 411, 640, 8216, 8772, 5075, 9022, 5692, + 7825, 9613, 5866, 5376, 3288, 3147, 9301, 386, 8701, + 9166, 418, 2003, 9005, 8592, 4188, 3735, 4232, 5238, + 8853, 7246, 2646, 1343, 9472, 371, 1177, 8422, 8202, + 5353, 9648, 2908, 8320, 6076, 6771, 8304, 1075, 2469, + 1568, 9483, 3555, 5266, 3405, 9971, 7501, 9677, 5813, + 5924, 7587, 1603, 937, 3813, 1988, 637, 2885, 5831, + 8209, 1869, 5873, 3244, 9083, 4316, 1649, 3716, 7799, + 3429, 9162, 139, 9399, 4042, 4832, 2298, 2896, 1054, + 6210, 4421, 1743, 2336, 4261, 861, 3352, 4781, 2982, + 219, 1594, 6937, 1157, 7791, 8670, 8548, 3732, 1054, + 1298, 3515, 1443, 4515, 5404, 1104, 8525, 7928, 1938, + 9321, 4093, 9617, 5069, 7691, 6537, 1777, 1177, 4792, + 3990, 8613, 2141, 639, 6090, 3243, 3817, 2379, 597, + 2806, 6242, 129, 4735, 1063, 7452, 9642, 1260, 4687, + 6658, 1205, 1033, 2603, 6809, 9426, 6394, 7227, 8386, + 557, 2634, 1874, 8349, 1415, 4690, 6198, 8318, 8284, + 9176, 6704, 6584, 6885, 9606, 
1543, 7428, 2559, 2822, + 1315, 6792, 6148, 819, 6087, 4634, 2762, 1934, 3174, + 6495, 6068, 138, 6548, 3452, 8896, 8290, 5349, 4350, + 9259, 6574, 8978, 6459, 1935, 8370, 6950, 8071, 2398, + 2191, 2389, 8876, 4727, 5597, 1126, 3589, 4971, 4846, + 7348, 4101, 516, 7156, 3095, 2299, 76, 5189, 1287, + 7838, 754, 6520, 6985, 2666, 2752, 173, 2069, 2831, + 5027, 1950, 6590, 8614, 8084, 463, 9089, 1779, 3430, + 7584, 6410, 8803, 4256, 5381, 1369, 2241, 8259, 582, + 686, 2251, 1507, 161, 211, 57, 8365, 5950, 8733, + 1228, 3943, 7405, 6953, 7761, 3781, 1755, 2225, 1783, + 4413, 7802, 9456, 2432, 8496, 3459, 8182, 666, 5081, + 9711, 4750, 8408, 873, 1701, 3598, 2226, 4834, 8977, + 119, 1439, 6621, 8888, 6718, 4964, 6775, 8285, 756, + 683, 7518, 7638, 5345, 9636, 8272, 8196, 4295, 5469, + 9453, 7671, 9819, 4008, 1695, 8702, 5830, 9258, 6438, + 993, 8991, 6137, 5725, 8489, 6299, 3472, 1535, 8632, + 272, 8780, 428, 5376, 6220, 1971, 1957, 4900, 8226, + 2265, 5227, 4555, 7142, 7595, 5252, 4188, 1870, 3747, + 6218, 6943, 3294, 5178, 3131, 9628, 2154, 8397, 1648, + 9870, 9081, 2344, 6566, 3675, 241, 9683, 5548, 2030, + 2234, 794, 9927, 3501, 4995, 2027, 4834, 6504, 479, + 6498, 8648, 2353, 2297, 8318, 4623, 9046, 6743, 6207, + 8130, 1077, 656, 4641, 2263, 3756, 9177, 3120, 1083, + 8650, 1248, 6568, 6625, 7483, 1893, 6194, 8846, 7443, + 2988, 5728, 9652, 4567, 9731, 6950, 8352, 8733, 938, + 9915, 5254, 7482, 724, 5457, 2774, 5506, 2482, 1189, + 551, 4600, 2755, 6757, 1057, 9353, 4932, 8508, 5036, + 6380, 4795, 7526, 4341, 907, 8663, 6669, 4592, 373, + 6374, 4856, 4138, 2881, 1151, 2689, 4911, 1512, 2587, + 794, 786, 7277, 8732, 6720, 810, 9573, 2632, 8645, + 89, 2002, 9562, 5004, 8476, 9057, 6183, 1456, 8245, + 1499, 3084, 7040, 3182, 7421, 2467, 18, 1654, 8066, + 3892, 645, 1826, 817, 5601, 6826, 1555, 8599, 8669, + 4406, 2057, 9546, 4111, 3145, 7482, 8521, 8104, 237, + 1875, 7275, 1479, 6187, 44, 3298, 5094, 4170, 2205, + 4442, 3536, 2699, 1153, 3332, 7163, 1256, 2533, 733, + 9820, 3400, 1795, 1338, 589, 3332, 1481, 8779, 8377, + 2005, 3701, 6750, 2118, 9260, 2101, 7768, 3538, 2479, + 5907, 2574, 4797, 4045, 5900, 5465, 8521, 8466, 1697, + 6282, 1744, 352, 397, 1611, 969, 8666, 9409, 3614, + 8478, 8247, 8885, 3286, 5250, 7001, 3291, 6758, 8084, + 1510, 7677, 9604, 8489, 8033, 3291, 1326, 5168, 9323, + 8370, 8206, 1664, 6593, 8357, 8619, 1887, 4419, 5990, + 5278, 9128, 5485, 8058, 6566, 5979, 5259, 6492, 5081, + 936, 5810, 9231, 9063, 9410, 4079, 9581, 4526, 2618, + 2468, 2724, 6060, 1881, 3175, 4090, 6363, 9964, 4758, + 3486, 3202, 540, 9157, 4166, 1974, 5549, 3666, 6137, + 9665, 498, 6014, 2287, 6885, 214, 9852, 8570, 7221, + 3853, 5536, 8282, 2123, 5809, 8936, 7986, 6716, 392, + 7672, 9526, 804, 2336, 8391, 2208, 9067, 7291, 4449, + 8158, 6745, 6107, 8769, 3393, 1713, 4310, 2615, 7559, + 4259, 3361, 9493, 9346, 98, 563, 4262, 6949, 9119, + 7647, 5199, 7674, 6087, 4360, 6308, 3631, 1778, 7472, + 8582, 6581, 3300, 4228, 2667, 6631, 7759, 6925, 8155, + 5702, 2695, 9383, 27, 9479, 9275, 1062, 9608, 9103, + 7513, 9349, 1805, 2186, 8934, 7211, 8306, 3787, 426, + 7793, 8765, 7635, 8057, 8447, 3456, 7831, 5006, 7449, + 5886, 5800, 19, 5725, 37, 4869, 8698, 7941, 6598, + 1906, 369, 6708, 9109, 1374, 2105, 1459, 743, 1027, + 7047, 4135, 1279, 479, 3744, 992, 6277, 4043, 1901, + 9461, 1701, 183, 5585, 7539, 7017, 158, 3044, 4339, + 2736, 3950, 2868, 1786, 3760, 3722, 687, 8148, 3388, + 6, 185, 8988, 5275, 757, 2881, 8187, 3221, 1592, + 9830, 3889, 4101, 6097, 9491, 8094, 3893, 5702, 2525, + 4242, 6698, 2410, 
2070, 2272, 8560, 3241, 7303, 1110, + 121]), + values=tensor([4.5491e-02, 3.2468e-01, 2.9501e-01, 6.3274e-01, + 1.0864e-02, 7.2561e-01, 2.7833e-01, 8.9921e-01, + 9.5712e-01, 7.8304e-01, 7.9990e-01, 5.7548e-02, + 2.6717e-01, 8.6061e-01, 8.3671e-01, 6.9606e-01, + 5.5892e-01, 5.1427e-01, 2.7201e-01, 7.5072e-01, + 7.9922e-01, 1.5437e-01, 8.2474e-01, 9.8408e-01, + 9.0110e-01, 9.1307e-01, 7.3003e-01, 2.2108e-01, + 5.0958e-01, 3.3159e-01, 9.0686e-01, 3.0235e-01, + 2.2882e-01, 1.5485e-01, 4.1432e-01, 7.3323e-01, + 5.5252e-01, 9.6080e-01, 7.7897e-01, 4.5968e-01, + 7.1762e-02, 9.5266e-01, 2.5126e-01, 3.6906e-01, + 7.1595e-01, 4.5846e-01, 2.7393e-01, 1.8774e-01, + 2.4921e-01, 3.1854e-01, 4.8334e-01, 1.4183e-02, + 3.9405e-01, 7.8745e-01, 1.2611e-01, 4.9633e-01, + 1.5275e-01, 9.4294e-03, 7.0521e-01, 8.7227e-01, + 5.8672e-01, 9.8469e-01, 7.4888e-01, 4.3210e-01, + 6.9548e-01, 4.5036e-01, 2.1180e-01, 6.2584e-01, + 6.8274e-01, 4.4221e-02, 3.7077e-01, 2.7813e-01, + 8.0969e-01, 4.9060e-01, 3.7621e-01, 9.4320e-02, + 6.5185e-03, 3.2050e-02, 9.6548e-01, 7.9314e-01, + 5.2607e-01, 8.4014e-01, 5.1244e-01, 4.5536e-01, + 2.9243e-01, 3.4722e-01, 2.6706e-01, 2.3069e-01, + 4.0751e-01, 6.0825e-01, 7.8459e-01, 8.8005e-01, + 5.0828e-01, 1.3147e-01, 2.6742e-01, 9.5215e-01, + 6.5675e-01, 5.9280e-01, 9.7954e-01, 6.0187e-01, + 3.3183e-01, 1.0600e-02, 7.6686e-01, 4.6235e-01, + 7.2398e-01, 1.2355e-01, 8.7187e-01, 1.5465e-01, + 5.5476e-02, 4.5871e-01, 6.3394e-01, 5.6422e-01, + 1.3625e-01, 7.8116e-01, 2.6024e-01, 6.0334e-01, + 2.2029e-01, 2.8339e-01, 2.5566e-01, 7.3421e-01, + 2.2859e-01, 6.0130e-01, 3.1568e-01, 6.6567e-01, + 5.6914e-02, 2.5935e-01, 5.8822e-02, 5.2249e-01, + 8.9886e-01, 9.2019e-01, 5.6993e-02, 7.7705e-01, + 2.3500e-01, 3.8917e-01, 7.0200e-01, 2.6454e-01, + 1.4123e-01, 5.5982e-01, 6.0613e-01, 8.7605e-01, + 4.7423e-01, 2.7690e-01, 5.0264e-02, 9.8423e-01, + 8.7080e-01, 8.7532e-01, 1.5537e-03, 5.7053e-01, + 3.2342e-01, 7.0720e-01, 7.0123e-01, 6.9576e-01, + 8.8003e-01, 1.1500e-01, 5.4614e-01, 2.4413e-01, + 8.5389e-01, 5.6087e-01, 9.5104e-01, 9.3133e-01, + 2.8049e-02, 5.6062e-01, 4.8771e-01, 3.9350e-01, + 5.7535e-01, 2.0051e-01, 9.2461e-02, 3.4240e-01, + 8.6489e-01, 1.7069e-01, 1.4503e-01, 8.2908e-01, + 8.4715e-01, 6.2055e-01, 6.0250e-01, 6.9669e-03, + 6.7451e-01, 9.1211e-01, 7.7998e-01, 9.1172e-01, + 6.4672e-01, 8.2260e-01, 2.9749e-01, 3.4357e-01, + 8.8965e-01, 1.6998e-01, 4.6817e-01, 5.6114e-01, + 2.9832e-03, 3.5782e-01, 2.3354e-01, 7.0389e-01, + 8.6494e-01, 2.9523e-01, 7.1093e-01, 8.5747e-01, + 3.9862e-01, 9.9242e-02, 6.3404e-01, 1.8341e-01, + 3.8813e-01, 4.5591e-01, 9.0054e-01, 9.5016e-01, + 4.2228e-01, 1.2941e-01, 3.8481e-01, 7.4146e-01, + 5.8461e-01, 5.7050e-01, 9.4103e-01, 9.6133e-01, + 2.7176e-01, 8.4268e-01, 9.5349e-01, 9.8518e-01, + 7.0966e-01, 1.9918e-01, 8.0055e-01, 4.7484e-01, + 7.3362e-01, 6.1497e-01, 6.4193e-02, 6.3662e-01, + 2.5121e-01, 8.4678e-01, 1.6158e-01, 1.1583e-01, + 7.4799e-01, 5.4370e-02, 2.4911e-01, 4.5653e-01, + 9.0328e-01, 2.0017e-01, 1.0692e-02, 8.5215e-01, + 3.1864e-01, 5.7972e-01, 9.9903e-01, 1.1865e-01, + 6.9590e-01, 9.8482e-01, 4.6454e-01, 6.6789e-02, + 2.3784e-01, 3.8803e-01, 9.5572e-01, 6.9527e-01, + 7.7733e-01, 3.0422e-01, 6.2625e-01, 2.4065e-01, + 2.9101e-01, 4.5591e-01, 7.5354e-01, 6.1629e-02, + 4.8890e-01, 6.2485e-01, 4.0109e-03, 5.2769e-01, + 5.1763e-01, 1.8385e-01, 1.3278e-01, 8.0457e-01, + 8.4173e-01, 9.2545e-01, 1.3840e-01, 7.6490e-01, + 1.4038e-01, 5.3636e-01, 6.8826e-01, 5.8422e-01, + 5.2788e-01, 9.1865e-01, 9.1687e-02, 9.4108e-01, + 7.7907e-01, 8.5824e-01, 6.6717e-01, 
6.0324e-01, + 9.6810e-01, 8.6025e-01, 4.3579e-01, 5.0796e-01, + 7.7129e-01, 3.4580e-01, 6.2934e-01, 7.1330e-01, + 5.4797e-02, 2.5678e-01, 7.7649e-01, 1.6495e-02, + 6.2250e-01, 6.7574e-01, 1.4771e-01, 6.5970e-01, + 8.5384e-01, 1.0370e-01, 4.4441e-01, 8.2723e-01, + 6.6853e-01, 7.7504e-02, 3.9059e-01, 9.2182e-01, + 6.0726e-01, 9.3213e-01, 4.2342e-01, 1.5624e-01, + 6.0098e-02, 6.1591e-02, 5.0596e-01, 2.6916e-01, + 4.4467e-01, 1.9151e-01, 5.7240e-01, 4.4660e-02, + 2.8517e-01, 3.5291e-01, 6.3467e-01, 3.2806e-01, + 5.5092e-01, 8.7141e-01, 5.9479e-01, 1.1783e-01, + 6.7772e-01, 3.7557e-01, 2.1769e-01, 5.1208e-01, + 1.1902e-01, 1.9006e-01, 7.0631e-03, 6.8733e-01, + 6.8166e-01, 2.2002e-01, 7.1981e-01, 8.9421e-01, + 4.6344e-01, 9.9377e-01, 4.8175e-01, 6.3229e-01, + 3.8380e-01, 6.8316e-01, 5.8573e-01, 7.7837e-01, + 6.6946e-02, 2.3475e-01, 7.0140e-01, 1.2344e-01, + 2.8019e-01, 5.9542e-01, 7.5818e-01, 8.5928e-01, + 6.5466e-01, 2.9697e-01, 3.0249e-01, 5.1461e-01, + 3.0574e-02, 4.4546e-02, 5.1599e-02, 7.1790e-01, + 7.6362e-01, 2.5265e-01, 3.6333e-01, 5.7543e-01, + 4.4933e-01, 5.2750e-01, 1.8405e-01, 3.7109e-01, + 7.9021e-01, 6.3106e-01, 6.5594e-02, 5.0774e-01, + 7.9707e-01, 7.4175e-01, 3.6721e-01, 3.0570e-01, + 9.9687e-01, 3.7871e-01, 4.1780e-01, 9.4569e-01, + 7.8552e-01, 2.4122e-01, 6.0346e-01, 5.2993e-02, + 9.2558e-01, 1.4753e-01, 7.8873e-01, 3.8921e-02, + 9.4993e-01, 1.7318e-01, 1.0655e-01, 8.9241e-01, + 6.7408e-01, 3.1152e-01, 6.8847e-01, 4.4325e-01, + 4.8165e-01, 2.8106e-01, 3.5513e-01, 6.8342e-01, + 9.4058e-01, 1.0810e-01, 7.4330e-01, 9.5478e-01, + 1.4664e-01, 9.7611e-01, 4.7803e-01, 6.2009e-01, + 6.7093e-01, 4.0554e-01, 7.0217e-01, 5.0924e-01, + 4.5965e-01, 3.8968e-01, 9.5584e-01, 2.4917e-02, + 3.1405e-02, 3.1252e-01, 7.7549e-02, 9.5983e-01, + 1.6207e-01, 8.7600e-01, 5.6135e-01, 6.6565e-01, + 2.7677e-01, 9.8016e-01, 3.9823e-01, 7.3076e-01, + 6.4197e-01, 9.0002e-01, 4.2386e-01, 4.2004e-01, + 3.9384e-01, 2.3617e-01, 8.5746e-02, 4.3909e-01, + 3.1866e-01, 8.4048e-01, 3.8010e-01, 6.8652e-01, + 3.8902e-01, 7.5018e-01, 9.1396e-01, 1.6753e-01, + 8.1305e-01, 5.6153e-01, 7.6107e-01, 8.7271e-02, + 3.6435e-01, 1.5824e-01, 7.5343e-01, 1.3463e-02, + 4.7761e-01, 5.9639e-01, 4.4348e-01, 6.7500e-02, + 3.3355e-01, 8.6420e-01, 3.0451e-01, 4.0484e-01, + 6.4028e-01, 8.2717e-01, 4.6771e-01, 5.9629e-01, + 8.2755e-01, 5.9855e-01, 9.1490e-01, 3.9521e-01, + 4.4891e-02, 7.3522e-01, 8.1976e-01, 8.1029e-01, + 8.2031e-01, 2.7847e-01, 7.2509e-01, 4.1215e-01, + 9.0617e-01, 4.7701e-02, 4.0429e-01, 5.2302e-01, + 3.1622e-01, 6.8745e-01, 3.6865e-01, 7.9997e-01, + 6.0227e-01, 9.8518e-01, 3.9481e-01, 6.8483e-01, + 7.5143e-01, 6.1870e-01, 9.4705e-01, 1.8463e-01, + 7.7813e-01, 7.9055e-01, 3.9150e-01, 6.0592e-02, + 1.8376e-01, 3.3696e-01, 6.6841e-01, 1.6702e-01, + 2.8734e-01, 7.5517e-01, 3.7677e-01, 9.7449e-01, + 4.5025e-01, 2.0193e-01, 2.3506e-01, 1.5941e-02, + 9.6738e-01, 1.7205e-01, 6.2912e-01, 3.3533e-02, + 9.6599e-01, 5.5655e-01, 2.2836e-01, 3.1634e-01, + 9.4812e-03, 9.6607e-01, 4.7036e-01, 3.9668e-01, + 8.7698e-01, 8.9147e-02, 1.2127e-01, 5.5813e-01, + 1.7205e-01, 8.2425e-01, 8.7990e-01, 3.3131e-01, + 3.1468e-01, 5.9720e-01, 7.9343e-01, 3.9190e-01, + 4.2435e-01, 1.9421e-01, 4.9058e-01, 7.2547e-01, + 5.8086e-01, 7.0869e-01, 7.5320e-01, 3.4621e-01, + 5.0496e-01, 5.5857e-01, 6.6373e-01, 6.5189e-01, + 2.8694e-01, 1.2657e-01, 9.0532e-01, 9.5797e-01, + 4.0699e-01, 5.3664e-01, 2.8145e-01, 3.3471e-02, + 3.3984e-01, 7.2274e-02, 3.8278e-01, 6.3313e-02, + 1.2888e-01, 4.5359e-01, 8.4959e-01, 7.1804e-01, + 7.5488e-01, 9.4004e-01, 5.2125e-01, 
2.7131e-01, + 6.1402e-01, 3.8144e-01, 8.7560e-01, 3.2266e-01, + 5.5649e-01, 7.3627e-01, 8.5755e-01, 7.2072e-03, + 2.8846e-01, 7.2430e-01, 5.3878e-01, 8.4452e-01, + 1.3342e-01, 3.8786e-01, 6.5924e-01, 2.7442e-01, + 8.6804e-02, 7.3956e-02, 1.0735e-02, 1.9793e-01, + 4.7212e-01, 1.6133e-01, 4.7390e-01, 2.2255e-01, + 7.5177e-01, 4.2505e-01, 3.0972e-01, 3.7468e-01, + 7.6238e-01, 9.4803e-01, 2.5885e-01, 2.9590e-01, + 5.0495e-01, 8.5311e-01, 8.1344e-01, 2.3873e-02, + 3.3971e-01, 4.1176e-01, 3.5343e-01, 6.8713e-01, + 2.5518e-01, 5.5269e-01, 1.9510e-01, 8.5124e-01, + 2.5981e-01, 2.1895e-01, 4.7009e-01, 1.9285e-01, + 4.7878e-01, 3.9918e-01, 8.3453e-01, 3.3741e-01, + 6.4600e-01, 6.7534e-01, 5.9816e-01, 4.3317e-01, + 1.6662e-01, 6.1035e-01, 5.5817e-01, 6.0579e-01, + 1.3883e-02, 1.7728e-01, 8.1614e-01, 8.4365e-01, + 9.6240e-01, 7.8831e-01, 1.7662e-01, 7.5505e-01, + 3.1979e-02, 7.5072e-01, 2.0440e-01, 3.6909e-01, + 2.5393e-01, 3.8386e-01, 2.0864e-01, 1.2775e-01, + 1.9641e-02, 7.4518e-01, 4.0993e-01, 4.4977e-01, + 8.1556e-01, 1.6905e-01, 5.3044e-02, 1.8450e-01, + 8.8919e-01, 6.9877e-01, 5.3492e-01, 9.8340e-01, + 3.0043e-01, 8.6934e-01, 5.0952e-01, 3.3501e-01, + 8.6829e-01, 9.5828e-01, 2.6305e-01, 6.8996e-01, + 9.7548e-01, 5.6316e-01, 8.0327e-01, 4.6951e-01, + 8.8042e-01, 9.6984e-01, 4.4366e-01, 1.3588e-01, + 6.2216e-01, 2.6858e-02, 1.8300e-01, 1.1829e-01, + 6.6395e-01, 8.3757e-01, 3.2313e-03, 5.4541e-01, + 2.8596e-01, 9.7916e-01, 9.8076e-02, 4.9537e-01, + 7.6776e-01, 1.2633e-01, 2.9367e-01, 8.2086e-01, + 8.4752e-01, 4.9738e-01, 1.2453e-01, 8.8421e-01, + 9.3727e-01, 5.0640e-01, 5.3707e-01, 1.8069e-01, + 9.2864e-01, 8.8341e-01, 3.8319e-01, 7.4187e-01, + 3.9035e-01, 4.2286e-01, 1.2256e-01, 9.7913e-01, + 2.7306e-02, 4.3080e-01, 2.8349e-01, 8.0193e-01, + 4.5756e-01, 7.5877e-01, 6.0580e-01, 3.6874e-01, + 1.3218e-01, 8.0418e-02, 2.7395e-01, 5.6953e-01, + 9.6882e-02, 9.2899e-01, 4.1200e-01, 6.4434e-01, + 6.0126e-01, 7.2073e-01, 1.2206e-02, 6.5301e-01, + 7.5807e-01, 4.5420e-01, 9.4519e-01, 1.2548e-01, + 4.0270e-01, 7.5287e-01, 2.7328e-01, 6.7114e-01, + 4.6869e-04, 6.0847e-02, 3.8973e-01, 1.6576e-01, + 6.9844e-01, 2.5031e-01, 2.6646e-01, 7.4219e-01, + 1.9507e-01, 5.4664e-01, 4.6153e-02, 9.0518e-01, + 9.0946e-01, 8.4267e-01, 8.0420e-01, 7.2293e-01, + 3.5989e-01, 4.2914e-01, 9.9099e-01, 6.2425e-01, + 1.3521e-01, 5.7729e-01, 5.8151e-01, 2.2300e-01, + 7.6972e-01, 4.8647e-01, 9.6036e-01, 3.5618e-01, + 1.0842e-01, 6.3586e-01, 1.0170e-01, 6.7330e-01, + 8.1168e-01, 6.2387e-01, 5.7831e-01, 4.7809e-01, + 1.1553e-01, 8.0639e-01, 2.6396e-02, 9.3913e-01, + 1.0144e-01, 7.0822e-03, 9.5812e-01, 6.1419e-01, + 5.5769e-01, 9.4462e-01, 2.4523e-01, 7.7377e-01, + 1.6828e-01, 9.6151e-01, 9.8418e-01, 4.1049e-01, + 8.0509e-01, 8.5756e-01, 4.0852e-01, 1.7004e-01, + 4.2725e-01, 2.6924e-01, 2.7568e-02, 6.4782e-01, + 2.3400e-01, 6.4486e-01, 7.4743e-01, 2.3534e-01, + 7.7784e-01, 2.9787e-01, 7.0881e-01, 3.6903e-01, + 9.8723e-01, 9.5179e-01, 6.6481e-01, 7.9932e-01, + 3.4256e-01, 6.3989e-01, 9.4136e-01, 6.3103e-01, + 9.7416e-01, 7.1185e-01, 4.3842e-01, 7.0811e-01, + 6.2721e-01, 2.3714e-01, 2.9081e-01, 8.3555e-01, + 9.0083e-01, 7.3734e-01, 3.2326e-02, 8.2624e-01, + 9.9258e-01, 9.0930e-01, 6.7614e-01, 5.4796e-02, + 8.5340e-01, 9.2531e-01, 7.6230e-01, 6.7267e-01, + 7.9031e-01, 4.7174e-01, 4.1194e-01, 1.8776e-01, + 6.0910e-01, 5.2252e-01, 2.1673e-01, 1.8473e-01, + 3.5143e-01, 7.0776e-01, 1.2603e-01, 5.0802e-01, + 4.5209e-01, 4.3706e-01, 7.6797e-02, 6.1164e-01, + 7.8143e-01, 1.3925e-01, 5.7531e-02, 8.7507e-01, + 5.7001e-01, 4.3906e-01, 9.2820e-01, 
7.6700e-01, + 2.9968e-01, 5.9688e-01, 3.1468e-01, 4.2212e-01, + 7.9895e-02, 2.9011e-01, 1.8494e-01, 9.2888e-01, + 8.9319e-01, 8.2784e-01, 1.7488e-02, 8.1506e-01, + 5.8942e-01, 2.6648e-01, 7.2461e-01, 4.3524e-01, + 3.3783e-02, 3.0604e-01, 5.7524e-01, 1.2624e-01, + 1.8255e-01, 3.5454e-01, 3.6503e-01, 4.9452e-01, + 4.3380e-01, 1.0044e-01, 1.5676e-01, 7.4210e-01, + 7.7909e-01, 1.5953e-01, 9.7173e-01, 9.8386e-01, + 6.0732e-01, 4.4764e-01, 9.7740e-02, 2.5615e-01, + 4.6915e-01, 2.7954e-01, 5.9480e-01, 5.5372e-01, + 4.4142e-01, 7.4870e-01, 7.8726e-01, 5.3797e-01, + 7.5394e-01, 3.8676e-01, 9.0069e-01, 7.9215e-02, + 2.6986e-01, 7.4047e-01, 6.0732e-01, 3.5085e-01, + 9.1826e-01, 9.4896e-01, 6.8926e-03, 1.0309e-01, + 2.7614e-01, 5.7639e-01, 6.1056e-01, 6.5885e-01, + 7.8603e-01, 4.6450e-01, 7.5119e-01, 7.6419e-01, + 5.8530e-02, 5.5102e-01, 4.6821e-01, 4.6438e-01, + 3.5602e-01, 8.8841e-01, 5.1596e-01, 9.1532e-01, + 6.0969e-01, 4.8093e-01, 4.6162e-01, 9.1442e-03, + 8.7271e-01, 3.3897e-01, 1.0331e-01, 7.9221e-01, + 5.8692e-01, 6.9774e-01, 3.0172e-01, 1.0667e-01, + 1.0951e-02, 1.3612e-01, 7.8502e-02, 4.6719e-01, + 8.6153e-01, 2.2920e-01, 8.7337e-01, 2.1362e-02, + 8.6720e-01, 9.6225e-01, 3.2705e-01, 7.9640e-01, + 3.6238e-01, 3.5585e-01, 1.3643e-01, 3.7646e-01, + 5.3997e-01, 7.8911e-02, 9.4754e-01, 5.9488e-01, + 4.3521e-02, 6.4602e-01, 3.7163e-02, 1.4246e-01, + 9.2557e-01, 3.9489e-01, 9.4390e-01, 1.2061e-01, + 7.9882e-01, 5.0438e-01, 5.2005e-01, 9.3905e-01, + 4.3100e-01, 8.5312e-01, 7.6276e-01, 8.8805e-01, + 6.9230e-02, 7.5638e-01, 2.7686e-02, 6.4170e-01, + 6.8542e-01, 9.3072e-01, 9.1971e-02, 9.7074e-01, + 4.9244e-01, 9.7479e-01, 3.9805e-01, 6.5312e-01, + 2.7671e-01, 7.9289e-01, 4.7310e-01, 7.6491e-01, + 2.0056e-01, 9.8477e-01, 3.5288e-01, 1.5954e-01, + 9.1449e-01, 9.5312e-01, 6.0952e-01, 7.7001e-01, + 6.5414e-01, 3.7977e-01, 5.5246e-01, 8.1022e-01, + 4.6688e-01, 8.6118e-01, 7.9898e-01, 6.4956e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3038, 0.6445, 0.5741, ..., 0.8215, 0.9151, 0.6540]) +tensor([0.7123, 0.9016, 0.3604, ..., 0.7264, 0.3786, 0.3585]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1402,13 +1509,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.372447967529297 seconds +Time: 10.634347915649414 seconds -[18.31, 17.96, 18.02, 17.94, 18.39, 17.81, 17.97, 18.58, 17.92, 17.98] -[73.48] -13.651954174041748 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 282031, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.372447967529297, 'TIME_S_1KI': 0.036777687444037345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1003.1455927085877, 'W': 73.48} -[18.31, 17.96, 18.02, 17.94, 18.39, 17.81, 17.97, 18.58, 17.92, 17.98, 18.3, 18.16, 17.88, 17.97, 17.91, 17.97, 18.81, 17.78, 18.15, 18.05] -325.53999999999996 -16.276999999999997 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 282031, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.372447967529297, 'TIME_S_1KI': 0.036777687444037345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1003.1455927085877, 'W': 73.48, 'J_1KI': 3.5568628721969846, 'W_1KI': 0.26053873510358794, 'W_D': 57.203, 'J_D': 780.9327346177101, 'W_D_1KI': 0.20282522134091643, 'J_D_1KI': 0.0007191593170286829} +[18.41, 17.8, 19.45, 
17.82, 18.17, 17.94, 18.34, 18.13, 18.06, 17.93] +[73.11] +13.703187704086304 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 286411, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.634347915649414, 'TIME_S_1KI': 0.03712967698743908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1001.8400530457496, 'W': 73.11} +[18.41, 17.8, 19.45, 17.82, 18.17, 17.94, 18.34, 18.13, 18.06, 17.93, 18.35, 18.13, 18.05, 17.73, 17.88, 18.02, 18.72, 17.91, 17.93, 18.0] +326.42499999999995 +16.32125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 286411, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.634347915649414, 'TIME_S_1KI': 0.03712967698743908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1001.8400530457496, 'W': 73.11, 'J_1KI': 3.4979105308306933, 'W_1KI': 0.25526254229062434, 'W_D': 56.78875, 'J_D': 778.186900730431, 'W_D_1KI': 0.19827712622769378, 'J_D_1KI': 0.0006922818125969107} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json index 49601d0..df20cd2 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8372, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.861924409866333, "TIME_S_1KI": 1.297410942411172, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1310.9924754476547, "W": 87.74, "J_1KI": 156.5925078174456, "W_1KI": 10.480172001911132, "W_D": 71.52799999999999, "J_D": 1068.7562090702056, "W_D_1KI": 8.543717152412803, "J_D_1KI": 1.0205108877702822} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8417, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.80616807937622, "TIME_S_1KI": 1.2838503123887632, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1304.2690537071228, "W": 87.74, "J_1KI": 154.95652295439263, "W_1KI": 10.424141618153737, "W_D": 71.23675, "J_D": 1058.9456178672315, "W_D_1KI": 8.463437091600332, "J_D_1KI": 1.005517059712526} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output index 5b5d205..2e9f271 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,36 +1,15 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2540500164031982} +{"MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2474722862243652} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 2499994, - 2499996, 2500000]), - col_indices=tensor([ 37595, 347043, 415637, ..., 145391, 181131, - 323148]), - values=tensor([0.9304, 0.5156, 0.8153, ..., 0.0582, 0.6116, 0.3872]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4551, 0.3395, 0.9990, ..., 0.2154, 0.7020, 0.1344]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 1.2540500164031982 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8372', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.861924409866333} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 10, ..., 2499992, +tensor(crow_indices=tensor([ 0, 6, 15, ..., 2499985, 2499994, 2500000]), - col_indices=tensor([140767, 212572, 418184, ..., 257460, 329048, - 350732]), - values=tensor([0.1302, 0.7593, 0.7287, ..., 0.1348, 0.8551, 0.2122]), + col_indices=tensor([131168, 178693, 230148, ..., 341937, 350836, + 404119]), + values=tensor([0.5017, 0.1065, 0.8260, ..., 0.9970, 0.9497, 0.3007]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3362, 0.7821, 0.5665, ..., 0.5113, 0.4644, 0.7174]) +tensor([0.0502, 0.1581, 0.5974, ..., 0.5502, 0.6695, 0.7013]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.861924409866333 seconds +Time: 1.2474722862243652 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8417', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.80616807937622} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 10, ..., 2499992, - 2499994, 2500000]), - col_indices=tensor([140767, 212572, 418184, ..., 257460, 329048, - 350732]), - values=tensor([0.1302, 0.7593, 0.7287, ..., 0.1348, 0.8551, 0.2122]), +tensor(crow_indices=tensor([ 0, 8, 12, ..., 2499995, + 2499999, 2500000]), + col_indices=tensor([108465, 113027, 118372, ..., 354925, 391668, + 96483]), + values=tensor([0.8038, 0.4194, 0.3623, ..., 0.9532, 0.5964, 0.0297]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3362, 0.7821, 0.5665, ..., 0.5113, 0.4644, 0.7174]) +tensor([0.4181, 0.0420, 0.6704, ..., 0.4969, 0.1289, 0.9173]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +38,31 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.861924409866333 seconds +Time: 10.80616807937622 seconds -[18.33, 17.76, 18.07, 18.0, 18.02, 17.78, 17.96, 17.97, 18.01, 17.77] +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 12, ..., 2499995, + 2499999, 2500000]), + col_indices=tensor([108465, 113027, 118372, ..., 354925, 391668, + 96483]), + values=tensor([0.8038, 0.4194, 0.3623, ..., 0.9532, 0.5964, 0.0297]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4181, 0.0420, 0.6704, ..., 0.4969, 0.1289, 0.9173]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.80616807937622 seconds + +[18.27, 18.7, 18.19, 18.06, 20.48, 17.92, 18.42, 18.1, 18.5, 17.88] [87.74] -14.941787958145142 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.861924409866333, 'TIME_S_1KI': 1.297410942411172, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1310.9924754476547, 'W': 87.74} -[18.33, 17.76, 18.07, 18.0, 18.02, 17.78, 17.96, 17.97, 18.01, 17.77, 18.44, 18.07, 17.99, 17.91, 18.26, 18.15, 17.93, 17.77, 18.22, 18.2] -324.24 -16.212 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.861924409866333, 'TIME_S_1KI': 1.297410942411172, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1310.9924754476547, 'W': 87.74, 'J_1KI': 156.5925078174456, 'W_1KI': 10.480172001911132, 'W_D': 71.52799999999999, 'J_D': 1068.7562090702056, 'W_D_1KI': 8.543717152412803, 'J_D_1KI': 1.0205108877702822} +14.865159034729004 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8417, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.80616807937622, 'TIME_S_1KI': 1.2838503123887632, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1304.2690537071228, 'W': 87.74} +[18.27, 18.7, 18.19, 18.06, 20.48, 17.92, 18.42, 18.1, 
18.5, 17.88, 18.52, 18.21, 18.12, 18.29, 18.48, 18.28, 17.96, 17.98, 18.11, 17.86] +330.06500000000005 +16.50325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8417, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.80616807937622, 'TIME_S_1KI': 1.2838503123887632, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1304.2690537071228, 'W': 87.74, 'J_1KI': 154.95652295439263, 'W_1KI': 10.424141618153737, 'W_D': 71.23675, 'J_D': 1058.9456178672315, 'W_D_1KI': 8.463437091600332, 'J_D_1KI': 1.005517059712526} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json index bafcc4c..3576579 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 78280, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.048285484313965, "TIME_S_1KI": 0.12836338125081712, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1146.701253862381, "W": 82.44, "J_1KI": 14.648713002840841, "W_1KI": 1.053142565150741, "W_D": 66.134, "J_D": 919.8925366683006, "W_D_1KI": 0.8448390393459376, "J_D_1KI": 0.010792527329406458} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 79200, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.557747602462769, "TIME_S_1KI": 0.1333048939704895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1194.9571797966958, "W": 76.09, "J_1KI": 15.087843179251209, "W_1KI": 0.9607323232323233, "W_D": 59.703, "J_D": 937.6071560704709, "W_D_1KI": 0.7538257575757576, "J_D_1KI": 0.009518001989592899} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output index d22c5fa..9da61ce 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.1482532024383545} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.14916634559631348} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
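The derived fields in these JSON records are arithmetically consistent with the raw lines at the tail of each .output file: the two bracketed lists are idle-power samples, the single-element list is average wall power under load, and the bare float is the measurement duration. For the 8417-iteration run above, 330.065 / 20 = 16.50325 idle watts, W_D = 87.74 - 16.50325 = 71.23675, and J_D = 71.23675 * 14.865159 = 1058.9456. A sketch of that arithmetic with hypothetical names follows; note one quirk, reproduced deliberately, that J_D_1KI as stored equals W_D_1KI divided by kilo-iterations again rather than J_D divided by kilo-iterations.

def derive(rec: dict, idle_samples: list, load_w: float, elapsed_s: float) -> dict:
    # idle_samples: the 20-entry bracketed list (sums to 330.065 above);
    # load_w: the single-element list, e.g. 73.11 or 87.74;
    # elapsed_s: the bare float, e.g. 14.865159034729004.
    idle_w = sum(idle_samples) / len(idle_samples)  # 16.50325
    kiters = rec["ITERATIONS"] / 1000               # the *_1KI normalizer
    rec["W"] = load_w
    rec["J"] = load_w * elapsed_s                   # 1304.269...
    rec["W_1KI"] = load_w / kiters
    rec["J_1KI"] = rec["J"] / kiters
    rec["W_D"] = load_w - idle_w                    # dynamic (above-idle) power
    rec["J_D"] = rec["W_D"] * elapsed_s             # 1058.9456...
    rec["W_D_1KI"] = rec["W_D"] / kiters
    rec["J_D_1KI"] = rec["W_D_1KI"] / kiters        # matches the stored files
    return rec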
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 8, ..., 249991, 249997, +tensor(crow_indices=tensor([ 0, 5, 7, ..., 249992, 249995, 250000]), - col_indices=tensor([11188, 48325, 9835, ..., 16403, 16442, 24121]), - values=tensor([0.5273, 0.3289, 0.0892, ..., 0.0153, 0.8132, 0.4919]), + col_indices=tensor([14210, 18192, 24309, ..., 18863, 37423, 45495]), + values=tensor([0.9647, 0.6185, 0.9345, ..., 0.6478, 0.4104, 0.2751]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4620, 0.4376, 0.8938, ..., 0.9801, 0.7388, 0.7080]) +tensor([0.7636, 0.2305, 0.9236, ..., 0.5850, 0.9097, 0.3088]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.1482532024383545 seconds +Time: 0.14916634559631348 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '70824', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.499845743179321} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '70391', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.332123041152954} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 12, ..., 249991, 249995, +tensor(crow_indices=tensor([ 0, 9, 13, ..., 249990, 249995, 250000]), - col_indices=tensor([ 9700, 17110, 17880, ..., 40636, 42079, 45237]), - values=tensor([0.5791, 0.9394, 0.7161, ..., 0.4792, 0.4698, 0.8140]), + col_indices=tensor([ 8823, 10157, 22008, ..., 15217, 25723, 27383]), + values=tensor([0.1165, 0.9082, 0.4420, ..., 0.1019, 0.9218, 0.7818]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2158, 0.6632, 0.3616, ..., 0.9096, 0.8324, 0.6259]) +tensor([0.6996, 0.2341, 0.0689, ..., 0.7606, 0.0770, 0.0289]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 9.499845743179321 seconds +Time: 9.332123041152954 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '78280', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.048285484313965} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '79200', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.557747602462769} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 249990, 249995, +tensor(crow_indices=tensor([ 0, 6, 9, ..., 249995, 249997, 250000]), - col_indices=tensor([ 1806, 10529, 23120, ..., 17166, 35800, 40447]), - values=tensor([0.3161, 0.7150, 0.6424, ..., 0.5169, 0.8858, 0.3422]), + col_indices=tensor([ 4540, 7121, 8304, ..., 4489, 19051, 41158]), + values=tensor([0.2192, 0.6581, 0.9045, ..., 0.0804, 0.2632, 0.1591]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5244, 0.0456, 0.6715, ..., 0.9006, 0.5240, 0.6616]) +tensor([0.8844, 0.7148, 0.4526, ..., 0.9882, 0.2475, 0.5582]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.048285484313965 seconds +Time: 10.557747602462769 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 249990, 249995, +tensor(crow_indices=tensor([ 0, 6, 9, ..., 249995, 249997, 250000]), - col_indices=tensor([ 1806, 10529, 23120, ..., 17166, 35800, 40447]), - values=tensor([0.3161, 0.7150, 0.6424, ..., 0.5169, 0.8858, 0.3422]), + col_indices=tensor([ 4540, 7121, 8304, ..., 4489, 19051, 41158]), + values=tensor([0.2192, 0.6581, 0.9045, ..., 0.0804, 0.2632, 0.1591]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5244, 0.0456, 0.6715, ..., 0.9006, 0.5240, 0.6616]) +tensor([0.8844, 0.7148, 0.4526, ..., 0.9882, 0.2475, 0.5582]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.048285484313965 seconds +Time: 10.557747602462769 seconds -[18.31, 18.07, 18.04, 17.95, 17.89, 18.22, 18.08, 17.87, 18.03, 18.16] -[82.44] -13.909525156021118 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78280, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.048285484313965, 'TIME_S_1KI': 0.12836338125081712, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1146.701253862381, 'W': 82.44} -[18.31, 18.07, 18.04, 17.95, 17.89, 18.22, 18.08, 17.87, 18.03, 18.16, 18.33, 19.12, 18.1, 18.07, 17.96, 17.95, 18.08, 18.08, 18.21, 18.0] -326.12 -16.306 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78280, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.048285484313965, 'TIME_S_1KI': 0.12836338125081712, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1146.701253862381, 'W': 82.44, 'J_1KI': 14.648713002840841, 'W_1KI': 1.053142565150741, 'W_D': 66.134, 'J_D': 919.8925366683006, 'W_D_1KI': 0.8448390393459376, 'J_D_1KI': 0.010792527329406458} +[18.33, 17.85, 17.91, 18.15, 17.99, 17.93, 17.79, 17.99, 18.26, 17.87] +[76.09] +15.70452332496643 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 79200, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.557747602462769, 'TIME_S_1KI': 0.1333048939704895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1194.9571797966958, 'W': 76.09} +[18.33, 17.85, 17.91, 18.15, 17.99, 17.93, 17.79, 17.99, 18.26, 17.87, 18.24, 18.68, 17.78, 17.91, 20.54, 18.31, 17.97, 18.31, 18.26, 17.78] +327.74 +16.387 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 79200, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.557747602462769, 'TIME_S_1KI': 0.1333048939704895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1194.9571797966958, 'W': 76.09, 'J_1KI': 15.087843179251209, 'W_1KI': 0.9607323232323233, 'W_D': 59.703, 'J_D': 937.6071560704709, 'W_D_1KI': 0.7538257575757576, 'J_D_1KI': 0.009518001989592899} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json index 9c50222..fcc4d9e 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 17475, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.743166208267212, "TIME_S_1KI": 0.6147734597005557, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1295.2754010248184, "W": 87.46, "J_1KI": 74.12162523747172, "W_1KI": 5.004864091559369, "W_D": 70.91274999999999, "J_D": 1050.2119905559418, "W_D_1KI": 4.057954220314734, "J_D_1KI": 0.2322148337805284} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 17543, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.699259996414185, "TIME_S_1KI": 0.6098877042931189, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1300.7915307188034, "W": 87.71, "J_1KI": 74.14875053974825, "W_1KI": 4.9997149860343155, "W_D": 71.40625, "J_D": 1058.997209444642, "W_D_1KI": 4.070355697429174, "J_D_1KI": 0.2320216438140098} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output index 95a3055..4d40449 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.6008265018463135} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.5985264778137207} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 42, 90, ..., 2499902, - 2499947, 2500000]), - col_indices=tensor([ 1236, 2335, 2455, ..., 44227, 44372, 44789]), - values=tensor([0.4453, 0.9405, 0.8001, ..., 0.3243, 0.3638, 0.0708]), +tensor(crow_indices=tensor([ 0, 56, 105, ..., 2499904, + 2499950, 2500000]), + col_indices=tensor([ 106, 3863, 5117, ..., 48831, 49457, 49843]), + values=tensor([0.6065, 0.7453, 0.1054, ..., 0.0788, 0.7875, 0.5947]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5116, 0.1335, 0.5143, ..., 0.8718, 0.6117, 0.3765]) +tensor([0.1569, 0.4932, 0.6676, ..., 0.2477, 0.5860, 0.5432]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.6008265018463135 seconds +Time: 0.5985264778137207 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '17475', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.743166208267212} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '17543', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.699259996414185} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 49, 107, ..., 2499873, +tensor(crow_indices=tensor([ 0, 54, 99, ..., 2499881, 2499945, 2500000]), - col_indices=tensor([ 1803, 2168, 2288, ..., 48770, 49205, 49605]), - values=tensor([0.1814, 0.9281, 0.5481, ..., 0.9692, 0.2397, 0.8106]), + col_indices=tensor([ 1025, 3202, 3517, ..., 49482, 49487, 49789]), + values=tensor([0.3859, 0.1414, 0.1100, ..., 0.9363, 0.6699, 0.1002]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8810, 0.5797, 0.1795, ..., 0.7146, 0.8135, 0.6945]) +tensor([0.4003, 0.0598, 0.2302, ..., 0.6994, 0.7206, 0.2744]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.743166208267212 seconds +Time: 10.699259996414185 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 49, 107, ..., 2499873, +tensor(crow_indices=tensor([ 0, 54, 99, ..., 2499881, 2499945, 2500000]), - col_indices=tensor([ 1803, 2168, 2288, ..., 48770, 49205, 49605]), - values=tensor([0.1814, 0.9281, 0.5481, ..., 0.9692, 0.2397, 0.8106]), + col_indices=tensor([ 1025, 3202, 3517, ..., 49482, 49487, 49789]), + values=tensor([0.3859, 0.1414, 0.1100, ..., 0.9363, 0.6699, 0.1002]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8810, 0.5797, 0.1795, ..., 0.7146, 0.8135, 0.6945]) +tensor([0.4003, 0.0598, 0.2302, ..., 0.6994, 0.7206, 0.2744]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.743166208267212 seconds +Time: 10.699259996414185 seconds -[18.24, 17.93, 17.9, 17.93, 18.17, 17.82, 18.06, 21.0, 18.11, 19.61] -[87.46] -14.809917688369751 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17475, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.743166208267212, 'TIME_S_1KI': 0.6147734597005557, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.2754010248184, 'W': 87.46} -[18.24, 17.93, 17.9, 17.93, 18.17, 17.82, 18.06, 21.0, 18.11, 19.61, 18.37, 18.2, 17.93, 17.83, 17.81, 18.07, 18.0, 17.85, 21.15, 18.15] -330.94500000000005 -16.547250000000002 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17475, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.743166208267212, 'TIME_S_1KI': 0.6147734597005557, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1295.2754010248184, 'W': 87.46, 'J_1KI': 74.12162523747172, 'W_1KI': 5.004864091559369, 'W_D': 70.91274999999999, 'J_D': 1050.2119905559418, 'W_D_1KI': 4.057954220314734, 'J_D_1KI': 0.2322148337805284} +[18.49, 17.83, 18.02, 18.17, 18.01, 17.87, 18.35, 18.02, 18.01, 17.87] +[87.71] +14.83059549331665 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17543, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.699259996414185, 'TIME_S_1KI': 0.6098877042931189, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.7915307188034, 'W': 87.71} +[18.49, 17.83, 18.02, 18.17, 18.01, 17.87, 18.35, 18.02, 18.01, 17.87, 18.45, 18.41, 18.09, 17.88, 18.15, 18.34, 17.89, 18.68, 17.87, 18.16] +326.075 +16.30375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17543, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.699259996414185, 'TIME_S_1KI': 0.6098877042931189, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.7915307188034, 'W': 87.71, 'J_1KI': 74.14875053974825, 'W_1KI': 4.9997149860343155, 'W_D': 71.40625, 'J_D': 1058.997209444642, 'W_D_1KI': 4.070355697429174, 'J_D_1KI': 0.2320216438140098} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json index 598f4f1..8166250 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 112560, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.602921962738037, "TIME_S_1KI": 0.0941979563143038, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1061.1344814062118, "W": 76.1, "J_1KI": 9.427278619458171, "W_1KI": 0.6760838663823738, "W_D": 59.91175, "J_D": 835.4063569827675, "W_D_1KI": 0.532265014214641, "J_D_1KI": 0.004728722585417919} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 109532, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.03889274597168, "TIME_S_1KI": 0.09165260148606508, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1026.1644780921936, "W": 75.74, "J_1KI": 9.368627233066078, "W_1KI": 0.6914874192016944, "W_D": 59.13049999999999, "J_D": 801.1304287276266, "W_D_1KI": 0.539846802760837, "J_D_1KI": 0.004928667446598592} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output index fbbef6c..709a214 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.10953974723815918} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.12936139106750488} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([20679, 38088, 21453, ..., 14604, 22112, 37567]), - values=tensor([0.0203, 0.9911, 0.7304, ..., 0.1348, 0.2520, 0.4128]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24998, 25000]), + col_indices=tensor([44477, 18295, 41758, ..., 46506, 28720, 46164]), + values=tensor([0.4132, 0.4608, 0.2599, ..., 0.0448, 0.1303, 0.6544]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.2000, 0.8382, 0.5478, ..., 0.6017, 0.0874, 0.6263]) +tensor([0.0039, 0.4422, 0.0639, ..., 0.1130, 0.9521, 0.1334]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.10953974723815918 seconds +Time: 0.12936139106750488 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '95855', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.94165301322937} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '81167', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.780844211578369} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([31700, 16272, 20084, ..., 46363, 9221, 39878]), - values=tensor([0.3577, 0.1970, 0.2573, ..., 0.9498, 0.8667, 0.9638]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([38361, 15493, 29627, ..., 27733, 22368, 35508]), + values=tensor([0.9149, 0.3524, 0.3637, ..., 0.0393, 0.5821, 0.3741]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7704, 0.6386, 0.5878, ..., 0.7750, 0.3511, 0.4334]) +tensor([0.5874, 0.0444, 0.7896, ..., 0.3503, 0.3177, 0.2388]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 8.94165301322937 seconds +Time: 7.780844211578369 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '112560', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.602921962738037} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '109532', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.03889274597168} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([11228, 2410, 48293, ..., 48555, 29403, 27641]), - values=tensor([0.9662, 0.4123, 0.9370, ..., 0.4524, 0.0602, 0.8924]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24997, 24997, 25000]), + col_indices=tensor([17005, 6306, 21289, ..., 8288, 8622, 19411]), + values=tensor([0.2779, 0.9469, 0.2610, ..., 0.9922, 0.2668, 0.6005]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.4903, 0.0715, 0.0009, ..., 0.3750, 0.8526, 0.7709]) +tensor([0.1194, 0.4917, 0.3228, ..., 0.2258, 0.0044, 0.3600]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.602921962738037 seconds +Time: 10.03889274597168 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([11228, 2410, 48293, ..., 48555, 29403, 27641]), - values=tensor([0.9662, 0.4123, 0.9370, ..., 0.4524, 0.0602, 0.8924]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24997, 24997, 25000]), + col_indices=tensor([17005, 6306, 21289, ..., 8288, 8622, 19411]), + values=tensor([0.2779, 0.9469, 0.2610, ..., 0.9922, 0.2668, 0.6005]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.4903, 0.0715, 0.0009, ..., 0.3750, 0.8526, 0.7709]) +tensor([0.1194, 0.4917, 0.3228, ..., 0.2258, 0.0044, 0.3600]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.602921962738037 seconds +Time: 10.03889274597168 seconds -[18.19, 18.01, 17.81, 17.69, 18.0, 18.19, 18.15, 17.85, 17.89, 17.99] -[76.1] -13.94394850730896 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 112560, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.602921962738037, 'TIME_S_1KI': 0.0941979563143038, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1061.1344814062118, 'W': 76.1} -[18.19, 18.01, 17.81, 17.69, 18.0, 18.19, 18.15, 17.85, 17.89, 17.99, 18.3, 17.89, 18.04, 18.07, 17.99, 17.83, 18.01, 18.29, 17.73, 18.17] -323.765 -16.18825 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 112560, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.602921962738037, 'TIME_S_1KI': 0.0941979563143038, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1061.1344814062118, 'W': 76.1, 'J_1KI': 9.427278619458171, 'W_1KI': 0.6760838663823738, 'W_D': 59.91175, 'J_D': 835.4063569827675, 'W_D_1KI': 0.532265014214641, 'J_D_1KI': 0.004728722585417919} +[19.15, 18.96, 18.11, 18.03, 19.81, 17.96, 18.06, 18.12, 18.35, 17.94] +[75.74] +13.548514366149902 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 109532, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.03889274597168, 'TIME_S_1KI': 0.09165260148606508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1026.1644780921936, 'W': 75.74} +[19.15, 18.96, 18.11, 18.03, 19.81, 17.96, 18.06, 18.12, 18.35, 17.94, 18.12, 21.35, 17.92, 18.62, 18.1, 18.19, 17.98, 17.91, 18.07, 18.09] +332.19000000000005 +16.609500000000004 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 109532, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.03889274597168, 'TIME_S_1KI': 0.09165260148606508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1026.1644780921936, 'W': 75.74, 'J_1KI': 9.368627233066078, 'W_1KI': 0.6914874192016944, 'W_D': 59.13049999999999, 'J_D': 801.1304287276266, 'W_D_1KI': 0.539846802760837, 'J_D_1KI': 0.004928667446598592} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..3b1a4d8 --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 334616, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.390317678451538, "TIME_S_1KI": 0.03105146699037565, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1009.4583482408524, "W": 73.59, "J_1KI": 3.0167665271261757, "W_1KI": 0.21992373347359362, "W_D": 57.37650000000001, "J_D": 787.052410896063, "W_D_1KI": 0.17146968465345352, "J_D_1KI": 0.0005124371956315703} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..ab60bf4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0494227409362793} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([2191, 1647, 4069, ..., 3482, 688, 2162]), + values=tensor([0.7127, 0.2553, 0.3133, ..., 0.9149, 0.5638, 0.5628]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.0865, 0.0532, 0.7203, ..., 0.4777, 0.7863, 0.0162]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.0494227409362793 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '212452', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.666574239730835} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([2208, 2123, 4174, ..., 2091, 42, 2382]), + values=tensor([0.8755, 0.2371, 0.7047, ..., 0.2373, 0.9261, 0.2864]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3651, 0.6415, 0.7426, ..., 0.3371, 0.9910, 0.6174]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 6.666574239730835 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '334616', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.390317678451538} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2498, 2498, 2500]), + col_indices=tensor([1385, 3626, 3706, ..., 891, 2896, 4403]), + values=tensor([0.8264, 0.4439, 0.4297, ..., 0.4171, 0.8922, 0.6160]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2966, 0.7201, 0.1357, ..., 0.1499, 0.6981, 0.8153]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.390317678451538 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2498, 2498, 2500]), + col_indices=tensor([1385, 3626, 3706, ..., 891, 2896, 4403]), + values=tensor([0.8264, 0.4439, 0.4297, ..., 0.4171, 0.8922, 0.6160]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2966, 0.7201, 0.1357, ..., 0.1499, 0.6981, 0.8153]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.390317678451538 seconds + +[18.39, 18.14, 17.96, 18.23, 18.13, 17.78, 18.17, 18.05, 18.11, 18.01] +[73.59] +13.71733045578003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 334616, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.390317678451538, 'TIME_S_1KI': 0.03105146699037565, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1009.4583482408524, 'W': 73.59} +[18.39, 18.14, 17.96, 18.23, 18.13, 17.78, 18.17, 18.05, 18.11, 18.01, 18.29, 17.99, 18.0, 17.86, 17.97, 18.01, 17.96, 17.81, 17.79, 17.93] +324.27 +16.2135 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 334616, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.390317678451538, 'TIME_S_1KI': 0.03105146699037565, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1009.4583482408524, 'W': 73.59, 'J_1KI': 3.0167665271261757, 'W_1KI': 0.21992373347359362, 'W_D': 57.37650000000001, 'J_D': 787.052410896063, 'W_D_1KI': 0.17146968465345352, 'J_D_1KI': 0.0005124371956315703} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..e6fec4b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 248893, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.503267288208008, "TIME_S_1KI": 0.04219993044484179, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1056.0920481681824, "W": 74.8, "J_1KI": 4.243156891387795, "W_1KI": 0.3005307501617161, "W_D": 58.488749999999996, "J_D": 825.7955051109194, "W_D_1KI": 0.2349955603411908, "J_D_1KI": 0.0009441629951070973} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..d474848 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0580594539642334} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 7, ..., 24994, 24998, 25000]), + col_indices=tensor([ 985, 1057, 218, ..., 4882, 1671, 4380]), + values=tensor([0.5160, 0.3498, 0.0303, ..., 0.2263, 0.8538, 0.6441]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8672, 0.0025, 0.6942, ..., 0.2074, 0.2932, 0.8728]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.0580594539642334 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '180849', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.629418849945068} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 12, ..., 24984, 24994, 25000]), + col_indices=tensor([ 206, 438, 1117, ..., 3589, 4561, 4654]), + values=tensor([0.7806, 0.0093, 0.9775, ..., 0.2394, 0.5986, 0.1036]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9079, 0.6440, 0.7990, ..., 0.4243, 0.2944, 0.4838]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 7.629418849945068 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '248893', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.503267288208008} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 11, ..., 24987, 24992, 25000]), + col_indices=tensor([ 263, 1234, 1436, ..., 3199, 3400, 4091]), + values=tensor([0.7110, 0.3838, 0.4652, ..., 0.0537, 0.9297, 0.5811]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.4941, 0.0109, 0.4935, ..., 0.1517, 0.7151, 0.3544]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.503267288208008 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 11, ..., 24987, 24992, 25000]), + col_indices=tensor([ 263, 1234, 1436, ..., 3199, 3400, 4091]), + values=tensor([0.7110, 0.3838, 0.4652, ..., 0.0537, 0.9297, 0.5811]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.4941, 0.0109, 0.4935, ..., 0.1517, 0.7151, 0.3544]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.503267288208008 seconds + +[18.2, 18.48, 18.33, 18.03, 18.1, 18.08, 18.14, 18.78, 18.15, 17.99] +[74.8] +14.118877649307251 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 248893, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.503267288208008, 'TIME_S_1KI': 0.04219993044484179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1056.0920481681824, 'W': 74.8} +[18.2, 18.48, 18.33, 18.03, 18.1, 18.08, 18.14, 18.78, 18.15, 17.99, 18.23, 17.98, 17.97, 18.01, 17.96, 18.02, 17.91, 18.17, 17.92, 17.97] +326.225 +16.31125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 248893, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.503267288208008, 'TIME_S_1KI': 0.04219993044484179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1056.0920481681824, 'W': 74.8, 'J_1KI': 4.243156891387795, 'W_1KI': 0.3005307501617161, 'W_D': 58.488749999999996, 'J_D': 825.7955051109194, 'W_D_1KI': 0.2349955603411908, 'J_D_1KI': 0.0009441629951070973} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..6c317ff --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 167260, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.636158227920532, "TIME_S_1KI": 0.0635905669491841, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1183.6357300853729, "W": 83.74, "J_1KI": 7.076621607589219, "W_1KI": 0.5006576587349038, "W_D": 67.26599999999999, "J_D": 950.7814786233901, "W_D_1KI": 0.40216429510941043, "J_D_1KI": 0.002404426014046457} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..72ef990 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.0799260139465332} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 35, 85, ..., 249913, 249959, + 250000]), + col_indices=tensor([ 50, 52, 142, ..., 3906, 4174, 4757]), + values=tensor([0.0913, 0.8215, 0.1970, ..., 0.8521, 0.9478, 0.8405]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5132, 0.5547, 0.3014, ..., 0.6656, 0.4241, 0.0798]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.0799260139465332 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '131371', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.24699854850769} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 34, 94, ..., 249884, 249936, + 250000]), + col_indices=tensor([ 2, 398, 450, ..., 4930, 4969, 4985]), + values=tensor([0.5923, 0.5022, 0.7915, ..., 0.6018, 0.8801, 0.8622]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.1968, 0.0295, 0.9143, ..., 0.4064, 0.2286, 0.1114]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 8.24699854850769 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '167260', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.636158227920532} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 41, 97, ..., 249903, 249957, + 250000]), + col_indices=tensor([ 6, 32, 62, ..., 4630, 4959, 4982]), + values=tensor([0.7649, 0.1722, 0.7795, ..., 0.2616, 0.2192, 0.2761]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3363, 0.3219, 0.7361, ..., 0.7182, 0.1290, 0.5403]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.636158227920532 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 41, 97, ..., 249903, 249957, + 250000]), + col_indices=tensor([ 6, 32, 62, ..., 4630, 4959, 4982]), + values=tensor([0.7649, 0.1722, 0.7795, ..., 0.2616, 0.2192, 0.2761]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3363, 0.3219, 0.7361, ..., 0.7182, 0.1290, 0.5403]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.636158227920532 seconds + +[18.39, 18.16, 18.03, 17.97, 18.05, 19.85, 18.01, 18.29, 18.12, 18.31] +[83.74] +14.13465166091919 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 167260, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.636158227920532, 'TIME_S_1KI': 0.0635905669491841, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1183.6357300853729, 'W': 83.74} +[18.39, 18.16, 18.03, 17.97, 18.05, 19.85, 18.01, 18.29, 18.12, 18.31, 17.95, 19.62, 17.88, 18.37, 18.2, 18.02, 18.09, 18.08, 18.42, 17.99] +329.48 +16.474 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 167260, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.636158227920532, 'TIME_S_1KI': 0.0635905669491841, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1183.6357300853729, 'W': 83.74, 'J_1KI': 7.076621607589219, 'W_1KI': 0.5006576587349038, 'W_D': 67.26599999999999, 'J_D': 950.7814786233901, 'W_D_1KI': 0.40216429510941043, 'J_D_1KI': 0.002404426014046457} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..cee5ebd --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 46485, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.648370742797852, "TIME_S_1KI": 0.2290711141830236, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1255.2527691650391, "W": 87.87, "J_1KI": 27.003393980101947, "W_1KI": 1.8902871894159408, "W_D": 71.29950000000001, "J_D": 1018.5375533752442, "W_D_1KI": 1.5338173604388514, "J_D_1KI": 0.03299596343850385} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..cd79c3c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.24121379852294922} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta 
state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 267, 541, ..., 1249516, + 1249748, 1250000]), + col_indices=tensor([ 43, 75, 121, ..., 4958, 4960, 4986]), + values=tensor([0.9222, 0.1508, 0.6151, ..., 0.6191, 0.5090, 0.9494]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.9528, 0.4494, 0.6520, ..., 0.1607, 0.1619, 0.1321]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.24121379852294922 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '43529', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.83215594291687} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 232, 495, ..., 1249518, + 1249779, 1250000]), + col_indices=tensor([ 48, 77, 155, ..., 4840, 4912, 4927]), + values=tensor([0.7412, 0.4704, 0.5361, ..., 0.0050, 0.3320, 0.0792]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.0891, 0.9943, 0.3145, ..., 0.1784, 0.0363, 0.2532]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 9.83215594291687 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46485', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.648370742797852} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 231, 510, ..., 1249526, + 1249780, 1250000]), + col_indices=tensor([ 32, 71, 112, ..., 4895, 4929, 4940]), + values=tensor([0.5396, 0.2475, 0.0729, ..., 0.2451, 0.2187, 0.9449]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6746, 0.7318, 0.7509, ..., 0.9415, 0.3905, 0.0197]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.648370742797852 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 231, 510, ..., 1249526, + 1249780, 1250000]), + col_indices=tensor([ 32, 71, 112, ..., 4895, 4929, 4940]), + values=tensor([0.5396, 0.2475, 0.0729, ..., 0.2451, 0.2187, 0.9449]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6746, 0.7318, 0.7509, ..., 0.9415, 0.3905, 0.0197]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.648370742797852 seconds + +[18.37, 17.89, 18.14, 18.27, 18.28, 18.81, 17.93, 18.0, 17.89, 19.89] +[87.87] +14.28533935546875 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46485, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.648370742797852, 'TIME_S_1KI': 0.2290711141830236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1255.2527691650391, 'W': 87.87} +[18.37, 17.89, 18.14, 18.27, 18.28, 18.81, 17.93, 18.0, 17.89, 19.89, 18.31, 18.14, 18.17, 17.98, 21.39, 18.55, 17.96, 18.39, 18.34, 17.99] +331.40999999999997 +16.5705 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46485, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.648370742797852, 'TIME_S_1KI': 0.2290711141830236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1255.2527691650391, 'W': 87.87, 'J_1KI': 27.003393980101947, 'W_1KI': 1.8902871894159408, 'W_D': 71.29950000000001, 'J_D': 1018.5375533752442, 'W_D_1KI': 1.5338173604388514, 'J_D_1KI': 0.03299596343850385} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..0fd50b0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 19767, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.566095352172852, "TIME_S_1KI": 0.5345320661796353, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1281.5856591796876, "W": 87.51, "J_1KI": 64.8346061202857, "W_1KI": 4.427075428744878, "W_D": 71.21225000000001, "J_D": 1042.904792114258, "W_D_1KI": 3.6025825871401835, "J_D_1KI": 0.18225236946123255} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..df7022e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.5311744213104248} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 483, 993, ..., 2498991, + 2499491, 2500000]), + col_indices=tensor([ 15, 20, 28, ..., 4987, 4988, 4995]), + values=tensor([0.8912, 0.6515, 0.2376, ..., 0.2173, 0.7300, 0.9523]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3817, 0.2295, 0.0793, ..., 0.5917, 0.1851, 0.3088]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.5311744213104248 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '19767', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.566095352172852} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 521, 1020, ..., 2499029, + 2499506, 2500000]), + col_indices=tensor([ 2, 18, 32, ..., 4991, 4992, 4995]), + values=tensor([0.1206, 0.3118, 0.4014, ..., 0.4488, 0.4763, 0.9896]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0140, 0.3283, 0.7098, ..., 0.4613, 0.1962, 0.1627]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.566095352172852 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
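
The iteration counts in these commands follow a visible calibration pattern: each configuration is first timed at 1000 iterations, and the count is then rescaled from the measured TIME_S until a run reaches the 10 s baseline (0.531 s at 1000 iterations leads to 19767 here). A minimal sketch of that search, assuming a rescale target of about 10.5 s, which the logged counts are consistent with; the constant and the run() helper are assumptions, not code from batch.py:

    # Hedged reconstruction of the iteration-count calibration in these logs.
    # run(n) stands for "execute n SpMV iterations and return the elapsed
    # seconds", i.e. the TIME_S field that spmv.py reports.
    BASELINE_TIME_S = 10   # matches BASELINE_TIME_S in the result dicts
    TARGET_S = 10.5        # assumed: baseline plus ~5% headroom

    def calibrate(run, iterations=1000):
        elapsed = run(iterations)
        while elapsed < BASELINE_TIME_S:
            iterations = int(iterations * TARGET_S / elapsed)
            elapsed = run(iterations)
        return iterations, elapsed

Checked against the log above: int(1000 * 10.5 / 0.5311744213104248) = 19767, and the 19767-iteration run clears the baseline at 10.57 s.
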
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 521, 1020, ..., 2499029, + 2499506, 2500000]), + col_indices=tensor([ 2, 18, 32, ..., 4991, 4992, 4995]), + values=tensor([0.1206, 0.3118, 0.4014, ..., 0.4488, 0.4763, 0.9896]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0140, 0.3283, 0.7098, ..., 0.4613, 0.1962, 0.1627]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.566095352172852 seconds + +[18.51, 17.88, 18.11, 18.97, 18.57, 17.75, 18.11, 17.81, 18.02, 17.72] +[87.51] +14.64501953125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19767, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.566095352172852, 'TIME_S_1KI': 0.5345320661796353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1281.5856591796876, 'W': 87.51} +[18.51, 17.88, 18.11, 18.97, 18.57, 17.75, 18.11, 17.81, 18.02, 17.72, 18.1, 18.14, 18.04, 18.33, 18.06, 17.95, 18.01, 18.09, 18.02, 17.86] +325.95500000000004 +16.29775 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19767, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.566095352172852, 'TIME_S_1KI': 0.5345320661796353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1281.5856591796876, 'W': 87.51, 'J_1KI': 64.8346061202857, 'W_1KI': 4.427075428744878, 'W_D': 71.21225000000001, 'J_D': 1042.904792114258, 'W_D_1KI': 3.6025825871401835, 'J_D_1KI': 0.18225236946123255} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..4be18ec --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 355144, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.430037498474121, "TIME_S_1KI": 0.02936847447366173, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 997.3673194527627, "W": 73.03, "J_1KI": 2.808346246741498, "W_1KI": 0.20563489739373325, "W_D": 56.462250000000004, "J_D": 771.1023268899322, "W_D_1KI": 0.15898410222332351, "J_D_1KI": 0.0004476609550585777} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..1e00532 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,383 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1000', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.07530069351196289} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), + col_indices=tensor([1366, 2183, 387, 4785, 591, 3875, 1782, 3853, 3491, + 1111, 4311, 1391, 2949, 4195, 1174, 98, 1356, 809, + 1785, 447, 2538, 4572, 2460, 1800, 303, 1931, 4013, + 4968, 4004, 1588, 1643, 1967, 3906, 4748, 1447, 2599, + 629, 3538, 4520, 4776, 4758, 2464, 1751, 3806, 96, + 198, 731, 3443, 3712, 4600, 4270, 2744, 4125, 400, + 468, 107, 2682, 4704, 252, 1804, 2511, 1911, 162, + 2509, 972, 3478, 980, 1895, 2935, 3965, 2890, 3988, + 2804, 3654, 1037, 4790, 2965, 394, 3461, 2942, 2671, + 4602, 851, 2319, 1925, 2531, 2262, 2466, 138, 3192, + 4165, 2776, 2205, 2786, 1112, 4160, 4088, 4917, 1466, + 32, 4695, 2757, 3360, 3218, 455, 480, 4012, 3928, + 3689, 1276, 1963, 1058, 3861, 2863, 4421, 4459, 4424, + 4964, 4366, 2158, 3511, 768, 3822, 1025, 3276, 1349, + 1095, 2928, 2660, 1067, 2626, 893, 4611, 4619, 1553, + 2755, 3328, 4431, 1950, 4722, 1972, 4066, 2996, 4851, + 2711, 2693, 4611, 1116, 4304, 1246, 2511, 2934, 4826, + 2926, 3416, 3468, 2846, 4286, 3701, 3015, 2373, 3319, + 2586, 1704, 3671, 1535, 4335, 3487, 2710, 3432, 1408, + 2336, 4517, 3976, 4761, 1747, 150, 3884, 4390, 3319, + 3373, 3574, 3662, 1429, 4058, 1144, 1909, 4439, 1862, + 343, 1833, 2363, 3001, 1926, 4696, 409, 4669, 2313, + 1538, 3220, 3305, 493, 2975, 4619, 1565, 4245, 1991, + 380, 1379, 2494, 2025, 851, 1740, 171, 2270, 2261, + 2794, 4072, 4453, 4823, 695, 669, 3117, 1730, 3920, + 4849, 3714, 1313, 3918, 1033, 1224, 3117, 2450, 3021, + 3892, 3817, 1313, 2580, 4367, 3947, 3099, 4651, 3006, + 4264, 712, 4793, 3855, 4618, 272, 4548]), + values=tensor([0.5356, 0.5172, 0.5088, 0.7213, 0.3478, 0.1053, 0.9439, + 0.9314, 0.4347, 0.5009, 0.9214, 0.0299, 0.2703, 0.5553, + 0.3016, 0.4455, 0.2361, 0.8920, 0.7432, 0.6139, 0.7733, + 0.3556, 0.1748, 0.8314, 0.8776, 0.8348, 0.1485, 0.4702, + 0.4810, 0.8748, 0.6149, 0.8907, 0.9641, 0.0939, 0.1055, + 0.6954, 0.2399, 0.1624, 0.3696, 0.9614, 0.3594, 0.5972, + 0.9819, 0.0645, 0.3543, 0.1275, 0.6800, 0.3878, 0.7605, + 0.6525, 0.7013, 0.5154, 0.4064, 0.1554, 0.5527, 0.2023, + 0.3691, 0.5797, 0.9886, 0.9941, 0.9352, 0.7550, 0.0819, + 0.3616, 0.7623, 0.6193, 0.3361, 0.9681, 0.4246, 0.6029, + 0.5772, 0.0561, 0.2661, 0.5456, 0.2304, 0.3887, 0.2381, + 0.3730, 0.7517, 0.6162, 0.2738, 0.4697, 0.7504, 0.9515, + 0.7210, 0.4160, 0.4959, 0.5300, 0.2485, 0.7381, 0.3695, + 0.4257, 0.1829, 0.0551, 0.7619, 0.8081, 0.4964, 0.4779, + 0.0357, 0.2681, 0.0521, 0.0389, 0.0434, 0.3566, 0.7098, + 0.1066, 0.0800, 0.4058, 0.5388, 0.9446, 0.2771, 0.5488, + 0.8493, 0.4334, 0.8666, 0.8039, 0.2616, 0.8733, 0.8412, + 0.6075, 0.0051, 0.7165, 0.9628, 0.7661, 0.4765, 0.6812, + 0.1095, 0.7697, 0.6192, 0.6769, 0.9349, 0.0052, 0.1322, + 0.1324, 0.9038, 0.2020, 0.6337, 0.8080, 0.2834, 0.0511, + 0.6009, 0.2042, 0.5100, 0.6688, 0.2408, 0.9657, 0.8116, + 0.8985, 0.0972, 0.8199, 0.3158, 0.7270, 0.0200, 0.2146, + 0.9137, 0.0484, 0.2512, 0.2305, 0.1410, 0.9701, 0.3767, + 0.1641, 0.2509, 0.4147, 0.6141, 0.4403, 0.2333, 0.3371, + 0.6103, 0.2630, 0.2671, 0.0768, 0.8063, 0.8867, 0.9092, + 0.7796, 0.9853, 0.4951, 0.2086, 0.4307, 0.0119, 0.1662, + 0.8220, 0.7333, 0.1521, 0.6924, 0.6584, 0.6936, 0.1717, + 0.0561, 0.9517, 0.6184, 0.4753, 0.7656, 0.9019, 0.5502, + 0.9529, 0.5922, 0.4037, 0.0988, 0.7843, 0.0649, 
0.2485, + 0.3469, 0.9377, 0.6160, 0.3297, 0.1479, 0.3514, 0.4560, + 0.6809, 0.0681, 0.5510, 0.6925, 0.2032, 0.7181, 0.5101, + 0.1339, 0.8347, 0.2363, 0.9076, 0.1946, 0.5622, 0.8947, + 0.8049, 0.7599, 0.8724, 0.5959, 0.8922, 0.7182, 0.4477, + 0.5685, 0.4980, 0.5565, 0.2995, 0.7747, 0.8395, 0.0020, + 0.6022, 0.0279, 0.4498, 0.0752, 0.1893, 0.3529, 0.6947, + 0.9277, 0.8241, 0.1856, 0.0213, 0.6132]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4529, 0.0478, 0.6057, ..., 0.4541, 0.9032, 0.3518]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.07530069351196289 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '139440', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.12260627746582} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4955, 3285, 1092, 4534, 4976, 442, 2522, 4514, 4006, + 1710, 2609, 275, 2553, 192, 68, 4509, 517, 1487, + 4557, 2975, 2588, 4021, 2076, 3240, 3988, 435, 2254, + 2223, 4880, 3865, 3818, 4642, 3945, 4353, 601, 3917, + 1880, 3877, 3791, 4777, 2081, 3917, 4502, 1438, 2426, + 3349, 29, 2250, 3660, 1858, 600, 2889, 2272, 1956, + 751, 3677, 3364, 2676, 4496, 2911, 2638, 552, 4753, + 3313, 3375, 308, 4658, 3893, 1495, 4737, 3323, 2703, + 2397, 4058, 1153, 4577, 3965, 4609, 1999, 4032, 95, + 1807, 3734, 3107, 2958, 2169, 4822, 1527, 3639, 620, + 4908, 4406, 564, 2813, 4923, 3870, 2382, 1337, 4050, + 4071, 2788, 1336, 4894, 4067, 1978, 1895, 498, 3798, + 1258, 549, 714, 3988, 3759, 3303, 1452, 1683, 4641, + 1837, 2644, 1353, 3988, 2550, 2364, 1794, 4541, 4681, + 337, 2800, 2585, 3617, 3880, 1843, 1947, 4694, 2266, + 1169, 161, 1385, 2852, 400, 463, 723, 4116, 753, + 2537, 98, 4403, 28, 338, 2803, 1599, 1013, 1557, + 4407, 177, 1191, 1815, 3966, 3511, 451, 3265, 291, + 1243, 392, 4068, 163, 3991, 4311, 3328, 960, 4017, + 4646, 1831, 817, 2890, 3530, 2708, 719, 2605, 1261, + 4102, 4791, 1478, 1213, 90, 923, 4372, 3587, 2492, + 1793, 3735, 793, 3175, 4362, 3857, 3311, 3724, 615, + 3226, 2202, 4290, 2384, 657, 2313, 1172, 518, 1645, + 899, 4853, 1109, 2856, 2859, 137, 3910, 650, 1455, + 3154, 3652, 1672, 4613, 1991, 246, 2555, 4, 2614, + 2633, 1294, 2903, 1660, 4703, 2866, 3053, 1012, 3045, + 4172, 3476, 296, 4197, 2675, 2071, 2677, 1326, 2255, + 468, 4989, 2355, 4824, 996, 43, 2583]), + values=tensor([0.3090, 0.2901, 0.9593, 0.2041, 0.3894, 0.4919, 0.4096, + 0.8215, 0.1866, 0.7740, 0.2336, 0.6944, 0.1434, 0.9450, + 0.5954, 0.3044, 0.5006, 0.3429, 0.4467, 0.0518, 0.6871, + 0.3725, 0.7034, 0.7486, 0.8746, 0.3907, 0.1517, 0.4997, + 0.1845, 0.7706, 0.6244, 0.6342, 0.6033, 0.6938, 0.2438, + 0.1144, 0.3513, 0.6893, 0.7703, 0.3523, 0.2076, 0.7465, + 0.4913, 0.9688, 0.0028, 0.1578, 0.0568, 0.7822, 0.7028, + 0.3600, 0.2439, 0.4360, 0.7037, 0.4050, 0.8531, 0.5414, + 0.4773, 0.3671, 0.4547, 0.2754, 0.4488, 0.0085, 0.3071, + 
0.4601, 0.4770, 0.5158, 0.4421, 0.5651, 0.5805, 0.4433, + 0.3995, 0.5205, 0.7157, 0.7315, 0.6363, 0.9589, 0.7223, + 0.9785, 0.4132, 0.5851, 0.7482, 0.0942, 0.2741, 0.5798, + 0.8967, 0.4132, 0.5974, 0.3338, 0.4602, 0.6811, 0.5641, + 0.0144, 0.5238, 0.0767, 0.8325, 0.0088, 0.0767, 0.2907, + 0.8996, 0.8420, 0.5348, 0.2313, 0.0781, 0.9045, 0.3083, + 0.9636, 0.2543, 0.6828, 0.1620, 0.2858, 0.1124, 0.3208, + 0.6389, 0.9267, 0.6353, 0.0688, 0.9267, 0.9566, 0.7499, + 0.7412, 0.4162, 0.5378, 0.6296, 0.9489, 0.6620, 0.4205, + 0.9920, 0.8509, 0.1746, 0.9154, 0.0320, 0.1367, 0.7287, + 0.4725, 0.2424, 0.3738, 0.1897, 0.9348, 0.6165, 0.7516, + 0.3874, 0.0970, 0.8851, 0.3148, 0.3850, 0.4337, 0.7076, + 0.4992, 0.1955, 0.2344, 0.3528, 0.9558, 0.2944, 0.6120, + 0.9024, 0.3017, 0.3837, 0.0724, 0.3520, 0.1259, 0.2545, + 0.1286, 0.8847, 0.1428, 0.4622, 0.0540, 0.3001, 0.6109, + 0.7042, 0.7070, 0.7848, 0.3801, 0.3847, 0.7723, 0.6446, + 0.9716, 0.3773, 0.8839, 0.4889, 0.3169, 0.6431, 0.7083, + 0.1827, 0.5140, 0.9487, 0.5911, 0.8204, 0.6180, 0.4421, + 0.3128, 0.9545, 0.2240, 0.5569, 0.0329, 0.3919, 0.3248, + 0.2245, 0.3333, 0.9672, 0.9062, 0.0547, 0.3239, 0.2321, + 0.0070, 0.4820, 0.4051, 0.2674, 0.7057, 0.7544, 0.3960, + 0.7548, 0.0492, 0.5769, 0.2071, 0.4627, 0.2573, 0.4606, + 0.6077, 0.9484, 0.5943, 0.5295, 0.3192, 0.6949, 0.6336, + 0.2976, 0.4421, 0.9484, 0.4080, 0.0752, 0.8220, 0.3509, + 0.7514, 0.8530, 0.4354, 0.9063, 0.8031, 0.3178, 0.2957, + 0.6220, 0.2051, 0.4848, 0.8340, 0.8353, 0.5340, 0.0238, + 0.3897, 0.4510, 0.4716, 0.8420, 0.2532]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.2666, 0.0606, 0.0325, ..., 0.3347, 0.5904, 0.3218]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 4.12260627746582 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '355144', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.430037498474121} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
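
At density 1e-05 the matrix is nearly empty, which is why these dumps print every nonzero instead of eliding them: 250 values spread over 5000 rows leave long flat runs in crow_indices, each repeated count marking a row with no entries. It also explains the two rescales above (1000 to 139440 to 355144 iterations): with so little work per multiply, far more iterations are needed to fill the 10 s baseline. The matrix fields in the JSON lines are mutually consistent:

    # Relationship between the synthetic-matrix fields, using only values
    # that appear in the log:
    rows, density = 5000, 1e-05
    size = rows * rows           # 25000000 == MATRIX_SIZE (total elements)
    nnz = int(size * density)    # 250      == MATRIX_NNZ
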
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 576, 858, 411, 4414, 2665, 376, 3054, 3322, 4372, + 4279, 4109, 1090, 4955, 1792, 2761, 3831, 3980, 529, + 2775, 617, 4117, 1022, 2357, 2558, 3577, 3578, 3958, + 4584, 2525, 1559, 1731, 3457, 3297, 1685, 2202, 1452, + 491, 1458, 4726, 3575, 1883, 2403, 3952, 4222, 1553, + 2911, 2279, 1175, 2336, 4753, 2819, 4436, 482, 3199, + 4976, 1797, 1610, 3205, 1638, 3687, 4164, 2284, 2312, + 3201, 1175, 2223, 2205, 1659, 1685, 3876, 4867, 4503, + 2508, 1070, 2370, 1257, 578, 3738, 3473, 1417, 2544, + 2056, 4843, 1000, 3228, 4837, 4943, 1171, 1607, 3883, + 537, 4674, 2976, 4953, 4244, 3122, 4003, 2726, 2176, + 3401, 3187, 4115, 3515, 94, 3353, 4307, 545, 4985, + 4583, 3489, 3066, 4121, 3459, 1522, 677, 4486, 147, + 3866, 1597, 3765, 2455, 4064, 1457, 3132, 4642, 3434, + 4882, 2125, 3414, 394, 3741, 3553, 2336, 1556, 4256, + 1078, 4010, 148, 4755, 1924, 2289, 4358, 4904, 1449, + 2494, 2907, 2566, 4673, 214, 1941, 3465, 4474, 2630, + 2169, 4563, 4405, 2613, 3633, 1231, 2935, 3998, 3861, + 1642, 586, 3529, 1226, 3435, 3242, 4352, 3913, 4066, + 3077, 2516, 4422, 1989, 692, 2984, 4096, 402, 4733, + 2105, 3134, 4775, 589, 4044, 4752, 4541, 3171, 2469, + 3653, 4657, 4456, 2233, 2803, 4834, 4936, 3017, 295, + 4978, 3056, 4089, 3884, 2193, 857, 3649, 854, 903, + 28, 3897, 555, 2344, 28, 2417, 2346, 4647, 1068, + 320, 3342, 2217, 2395, 4836, 4346, 3869, 1532, 3168, + 2904, 3224, 1957, 350, 1919, 1414, 1439, 2678, 3944, + 694, 4893, 4079, 3781, 2587, 2843, 2494, 2488, 824, + 1995, 2151, 656, 824, 1220, 2366, 1835]), + values=tensor([3.0955e-01, 7.8676e-01, 4.1266e-01, 3.7203e-01, + 7.1730e-01, 3.7179e-01, 6.0963e-01, 1.0902e-02, + 1.1230e-01, 2.1823e-01, 1.9100e-01, 8.5284e-01, + 3.9664e-01, 7.2699e-01, 1.5904e-01, 3.5501e-01, + 7.5722e-01, 5.6198e-01, 5.1816e-01, 6.4843e-01, + 9.7108e-01, 5.2337e-01, 4.5987e-01, 1.8356e-01, + 3.1359e-01, 2.0336e-01, 9.3922e-01, 6.3176e-01, + 5.5921e-01, 9.2083e-01, 3.8441e-01, 4.1891e-01, + 4.9039e-02, 2.5835e-01, 1.4251e-01, 8.7986e-02, + 1.9179e-01, 4.9636e-02, 9.9221e-01, 8.8195e-01, + 3.6211e-01, 7.7986e-01, 8.8005e-01, 5.3709e-01, + 6.1723e-01, 2.3666e-01, 6.4046e-01, 7.4852e-01, + 8.6162e-01, 6.4736e-02, 6.4638e-01, 6.8790e-01, + 7.7258e-02, 9.2613e-01, 4.5329e-01, 3.8429e-01, + 4.4778e-01, 5.4974e-01, 7.1635e-02, 9.9247e-01, + 6.0152e-01, 9.9716e-01, 7.7326e-02, 6.0941e-01, + 4.9490e-01, 7.1856e-01, 9.5478e-01, 7.3740e-01, + 7.1156e-01, 7.7724e-01, 6.8908e-01, 8.4478e-01, + 5.3169e-01, 3.1838e-01, 6.4893e-01, 3.6731e-01, + 9.6217e-01, 9.5642e-01, 3.3310e-01, 8.0468e-01, + 4.4419e-01, 9.9457e-01, 9.4870e-01, 5.1652e-01, + 2.2471e-01, 4.9478e-02, 7.7952e-01, 3.1317e-01, + 4.6028e-01, 9.9118e-01, 2.1805e-01, 7.6144e-01, + 5.8009e-01, 5.8921e-01, 9.6946e-01, 3.7819e-02, + 8.9083e-01, 3.9045e-01, 4.6997e-01, 7.7548e-01, + 7.6016e-01, 9.9749e-01, 2.2222e-01, 8.7022e-01, + 1.7241e-01, 5.1297e-01, 5.3356e-01, 7.6400e-01, + 4.5765e-01, 9.3983e-01, 7.4746e-01, 2.2337e-02, + 4.6779e-01, 4.1228e-02, 4.0470e-01, 5.8279e-01, + 3.9830e-01, 7.9952e-01, 2.1413e-01, 6.9695e-01, + 8.4451e-01, 7.5133e-01, 6.1979e-01, 1.0235e-01, + 2.3922e-01, 9.7618e-01, 2.7859e-01, 9.1245e-01, + 1.8747e-01, 1.3708e-01, 4.3286e-01, 4.5125e-01, + 7.7463e-01, 6.6460e-01, 4.6171e-01, 5.2632e-01, + 1.3309e-01, 4.8984e-01, 6.6220e-01, 3.7532e-01, + 2.3458e-01, 9.8677e-01, 1.8606e-01, 5.8578e-01, + 2.0218e-01, 8.1884e-01, 1.6790e-01, 8.2955e-01, + 8.0990e-01, 7.9230e-01, 
5.7415e-04, 1.5263e-01, + 3.0153e-02, 4.3910e-01, 1.1145e-01, 8.2933e-01, + 4.2403e-01, 9.4143e-01, 1.1893e-01, 2.2950e-01, + 4.0652e-01, 5.3859e-02, 3.4042e-01, 3.0550e-01, + 7.4631e-01, 2.0289e-01, 2.7832e-01, 9.2428e-02, + 8.1994e-01, 6.1876e-01, 8.1655e-01, 3.3884e-01, + 8.1926e-01, 3.0647e-01, 2.5277e-02, 6.7292e-01, + 6.3249e-01, 3.0699e-01, 8.3683e-02, 1.1258e-01, + 5.7451e-01, 9.9511e-01, 3.5203e-01, 6.1419e-01, + 7.8849e-01, 2.6274e-01, 6.6338e-01, 2.1944e-01, + 5.0745e-01, 9.4340e-02, 4.8396e-02, 5.6132e-01, + 9.5395e-01, 7.8119e-01, 2.9298e-01, 9.8647e-01, + 4.1870e-03, 7.2546e-01, 1.3543e-01, 1.4547e-01, + 9.5808e-01, 3.2689e-01, 3.3868e-01, 4.7652e-01, + 8.8370e-01, 6.0302e-01, 7.9645e-01, 6.6784e-01, + 5.1333e-01, 1.1003e-01, 1.8848e-01, 9.5891e-01, + 5.8130e-01, 8.9461e-01, 5.9679e-01, 7.2510e-01, + 6.8221e-01, 6.6161e-01, 2.4940e-01, 6.6307e-01, + 2.4001e-02, 4.4766e-02, 2.4703e-01, 5.2095e-02, + 8.5216e-01, 3.2978e-01, 6.8601e-01, 2.3333e-01, + 6.2542e-01, 6.6716e-01, 6.3532e-01, 9.7031e-01, + 2.6179e-01, 5.9241e-01, 6.1379e-01, 8.7532e-01, + 5.8130e-01, 3.7637e-01, 4.6468e-01, 2.0496e-01, + 7.4431e-01, 7.1477e-02, 8.7938e-01, 4.5946e-01, + 4.6023e-01, 7.9786e-01, 2.4383e-01, 3.7799e-01, + 1.9335e-01, 7.4334e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.5879, 0.8514, 0.6272, ..., 0.2435, 0.3582, 0.3734]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.430037498474121 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
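
The UserWarning repeated throughout points at the conversion on spmv.py line 75; the kernel being timed is a CSR sparse-matrix times dense-vector product. A self-contained sketch of that measurement, assuming the multiply is a plain matmul (the to_sparse_csr line is quoted from the warning; the rest is illustrative, not spmv.py itself):

    import time
    import torch

    # Build a roughly 5%-dense 5000x5000 matrix, then convert it; the
    # conversion is the line PyTorch's beta-state warning points at.
    dense = torch.rand(5000, 5000)
    dense[torch.rand(5000, 5000) > 0.05] = 0.0
    matrix = dense.to_sparse_csr().type(torch.float32)
    x = torch.rand(5000)

    # The logs report the total seconds for ITERATIONS repetitions as TIME_S.
    iterations = 1000
    start = time.time()
    for _ in range(iterations):
        y = matrix @ x
    print(f'Time: {time.time() - start} seconds')
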
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 576, 858, 411, 4414, 2665, 376, 3054, 3322, 4372, + 4279, 4109, 1090, 4955, 1792, 2761, 3831, 3980, 529, + 2775, 617, 4117, 1022, 2357, 2558, 3577, 3578, 3958, + 4584, 2525, 1559, 1731, 3457, 3297, 1685, 2202, 1452, + 491, 1458, 4726, 3575, 1883, 2403, 3952, 4222, 1553, + 2911, 2279, 1175, 2336, 4753, 2819, 4436, 482, 3199, + 4976, 1797, 1610, 3205, 1638, 3687, 4164, 2284, 2312, + 3201, 1175, 2223, 2205, 1659, 1685, 3876, 4867, 4503, + 2508, 1070, 2370, 1257, 578, 3738, 3473, 1417, 2544, + 2056, 4843, 1000, 3228, 4837, 4943, 1171, 1607, 3883, + 537, 4674, 2976, 4953, 4244, 3122, 4003, 2726, 2176, + 3401, 3187, 4115, 3515, 94, 3353, 4307, 545, 4985, + 4583, 3489, 3066, 4121, 3459, 1522, 677, 4486, 147, + 3866, 1597, 3765, 2455, 4064, 1457, 3132, 4642, 3434, + 4882, 2125, 3414, 394, 3741, 3553, 2336, 1556, 4256, + 1078, 4010, 148, 4755, 1924, 2289, 4358, 4904, 1449, + 2494, 2907, 2566, 4673, 214, 1941, 3465, 4474, 2630, + 2169, 4563, 4405, 2613, 3633, 1231, 2935, 3998, 3861, + 1642, 586, 3529, 1226, 3435, 3242, 4352, 3913, 4066, + 3077, 2516, 4422, 1989, 692, 2984, 4096, 402, 4733, + 2105, 3134, 4775, 589, 4044, 4752, 4541, 3171, 2469, + 3653, 4657, 4456, 2233, 2803, 4834, 4936, 3017, 295, + 4978, 3056, 4089, 3884, 2193, 857, 3649, 854, 903, + 28, 3897, 555, 2344, 28, 2417, 2346, 4647, 1068, + 320, 3342, 2217, 2395, 4836, 4346, 3869, 1532, 3168, + 2904, 3224, 1957, 350, 1919, 1414, 1439, 2678, 3944, + 694, 4893, 4079, 3781, 2587, 2843, 2494, 2488, 824, + 1995, 2151, 656, 824, 1220, 2366, 1835]), + values=tensor([3.0955e-01, 7.8676e-01, 4.1266e-01, 3.7203e-01, + 7.1730e-01, 3.7179e-01, 6.0963e-01, 1.0902e-02, + 1.1230e-01, 2.1823e-01, 1.9100e-01, 8.5284e-01, + 3.9664e-01, 7.2699e-01, 1.5904e-01, 3.5501e-01, + 7.5722e-01, 5.6198e-01, 5.1816e-01, 6.4843e-01, + 9.7108e-01, 5.2337e-01, 4.5987e-01, 1.8356e-01, + 3.1359e-01, 2.0336e-01, 9.3922e-01, 6.3176e-01, + 5.5921e-01, 9.2083e-01, 3.8441e-01, 4.1891e-01, + 4.9039e-02, 2.5835e-01, 1.4251e-01, 8.7986e-02, + 1.9179e-01, 4.9636e-02, 9.9221e-01, 8.8195e-01, + 3.6211e-01, 7.7986e-01, 8.8005e-01, 5.3709e-01, + 6.1723e-01, 2.3666e-01, 6.4046e-01, 7.4852e-01, + 8.6162e-01, 6.4736e-02, 6.4638e-01, 6.8790e-01, + 7.7258e-02, 9.2613e-01, 4.5329e-01, 3.8429e-01, + 4.4778e-01, 5.4974e-01, 7.1635e-02, 9.9247e-01, + 6.0152e-01, 9.9716e-01, 7.7326e-02, 6.0941e-01, + 4.9490e-01, 7.1856e-01, 9.5478e-01, 7.3740e-01, + 7.1156e-01, 7.7724e-01, 6.8908e-01, 8.4478e-01, + 5.3169e-01, 3.1838e-01, 6.4893e-01, 3.6731e-01, + 9.6217e-01, 9.5642e-01, 3.3310e-01, 8.0468e-01, + 4.4419e-01, 9.9457e-01, 9.4870e-01, 5.1652e-01, + 2.2471e-01, 4.9478e-02, 7.7952e-01, 3.1317e-01, + 4.6028e-01, 9.9118e-01, 2.1805e-01, 7.6144e-01, + 5.8009e-01, 5.8921e-01, 9.6946e-01, 3.7819e-02, + 8.9083e-01, 3.9045e-01, 4.6997e-01, 7.7548e-01, + 7.6016e-01, 9.9749e-01, 2.2222e-01, 8.7022e-01, + 1.7241e-01, 5.1297e-01, 5.3356e-01, 7.6400e-01, + 4.5765e-01, 9.3983e-01, 7.4746e-01, 2.2337e-02, + 4.6779e-01, 4.1228e-02, 4.0470e-01, 5.8279e-01, + 3.9830e-01, 7.9952e-01, 2.1413e-01, 6.9695e-01, + 8.4451e-01, 7.5133e-01, 6.1979e-01, 1.0235e-01, + 2.3922e-01, 9.7618e-01, 2.7859e-01, 9.1245e-01, + 1.8747e-01, 1.3708e-01, 4.3286e-01, 4.5125e-01, + 7.7463e-01, 6.6460e-01, 4.6171e-01, 5.2632e-01, + 1.3309e-01, 4.8984e-01, 6.6220e-01, 3.7532e-01, + 2.3458e-01, 9.8677e-01, 1.8606e-01, 5.8578e-01, + 2.0218e-01, 8.1884e-01, 1.6790e-01, 8.2955e-01, + 8.0990e-01, 7.9230e-01, 
5.7415e-04, 1.5263e-01, + 3.0153e-02, 4.3910e-01, 1.1145e-01, 8.2933e-01, + 4.2403e-01, 9.4143e-01, 1.1893e-01, 2.2950e-01, + 4.0652e-01, 5.3859e-02, 3.4042e-01, 3.0550e-01, + 7.4631e-01, 2.0289e-01, 2.7832e-01, 9.2428e-02, + 8.1994e-01, 6.1876e-01, 8.1655e-01, 3.3884e-01, + 8.1926e-01, 3.0647e-01, 2.5277e-02, 6.7292e-01, + 6.3249e-01, 3.0699e-01, 8.3683e-02, 1.1258e-01, + 5.7451e-01, 9.9511e-01, 3.5203e-01, 6.1419e-01, + 7.8849e-01, 2.6274e-01, 6.6338e-01, 2.1944e-01, + 5.0745e-01, 9.4340e-02, 4.8396e-02, 5.6132e-01, + 9.5395e-01, 7.8119e-01, 2.9298e-01, 9.8647e-01, + 4.1870e-03, 7.2546e-01, 1.3543e-01, 1.4547e-01, + 9.5808e-01, 3.2689e-01, 3.3868e-01, 4.7652e-01, + 8.8370e-01, 6.0302e-01, 7.9645e-01, 6.6784e-01, + 5.1333e-01, 1.1003e-01, 1.8848e-01, 9.5891e-01, + 5.8130e-01, 8.9461e-01, 5.9679e-01, 7.2510e-01, + 6.8221e-01, 6.6161e-01, 2.4940e-01, 6.6307e-01, + 2.4001e-02, 4.4766e-02, 2.4703e-01, 5.2095e-02, + 8.5216e-01, 3.2978e-01, 6.8601e-01, 2.3333e-01, + 6.2542e-01, 6.6716e-01, 6.3532e-01, 9.7031e-01, + 2.6179e-01, 5.9241e-01, 6.1379e-01, 8.7532e-01, + 5.8130e-01, 3.7637e-01, 4.6468e-01, 2.0496e-01, + 7.4431e-01, 7.1477e-02, 8.7938e-01, 4.5946e-01, + 4.6023e-01, 7.9786e-01, 2.4383e-01, 3.7799e-01, + 1.9335e-01, 7.4334e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.5879, 0.8514, 0.6272, ..., 0.2435, 0.3582, 0.3734]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.430037498474121 seconds + +[18.28, 21.02, 18.58, 17.81, 18.18, 17.91, 18.02, 17.91, 18.05, 17.86] +[73.03] +13.656953573226929 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 355144, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.430037498474121, 'TIME_S_1KI': 0.02936847447366173, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.3673194527627, 'W': 73.03} +[18.28, 21.02, 18.58, 17.81, 18.18, 17.91, 18.02, 17.91, 18.05, 17.86, 18.18, 17.95, 18.17, 18.08, 18.7, 17.95, 18.01, 17.92, 21.06, 17.75] +331.35499999999996 +16.567749999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 355144, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.430037498474121, 'TIME_S_1KI': 0.02936847447366173, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.3673194527627, 'W': 73.03, 'J_1KI': 2.808346246741498, 'W_1KI': 0.20563489739373325, 'W_D': 56.462250000000004, 'J_D': 771.1023268899322, 'W_D_1KI': 0.15898410222332351, 'J_D_1KI': 0.0004476609550585777} diff --git a/pytorch/synthetic_sizes b/pytorch/synthetic_sizes index fc7c6bc..5b351e3 100644 --- a/pytorch/synthetic_sizes +++ b/pytorch/synthetic_sizes @@ -1,5 +1,5 @@ +5000 10000 50000 100000 500000 -1000000
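
For each configuration, the unlabeled lines before the result dicts are the raw power data: a list of idle wattage samples, the load wattage in a one-element list, the duration of the power-measurement run in seconds, and then a longer sample list followed by two scalars, the first being 20 times the second and the second being the idle wattage that the dict subtracts from W. The derived fields reproduce directly from those numbers. A reconstruction using the 1e-05-density run above (every relation checks against its final dict; how the idle figure is aggregated from the printed samples is not reconstructed here, and J_D_1KI is omitted because it does not follow the same per-1000-iterations pattern as the other *_1KI fields):

    # Derived energy fields, with all inputs copied verbatim from the log.
    load_watts = 73.03               # "W": wattage under load
    elapsed = 13.656953573226929     # seconds, printed after [73.03]
    idle_watts = 16.567749999999997  # printed just before the final dict
    iterations = 355144              # "ITERATIONS"

    J = load_watts * elapsed         # 997.367...  == "J" (energy under load)
    W_D = load_watts - idle_watts    # 56.46225    == "W_D" (load minus idle)
    J_D = W_D * elapsed              # 771.102...  == "J_D"

    per_1k = lambda x: x / iterations * 1000
    # per_1k(10.430037498474121) == "TIME_S_1KI"
    # per_1k(J)          == "J_1KI"
    # per_1k(load_watts) == "W_1KI"
    # per_1k(W_D)        == "W_D_1KI"
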