#include "petscversion.h"

    INTEGER(INTG), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set all values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error)
    RETURN 1
    REAL(SP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set all values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error)
    RETURN 1
    REAL(DP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(abs(VALUE)<=zero_tolerance) THEN
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                CALL petsc_matzeroentries(distributed_matrix%PETSC%OVERRIDE_MATRIX,err,error,*999)
              ELSE
                CALL petsc_matzeroentries(distributed_matrix%PETSC%MATRIX,err,error,*999)
              ENDIF
            ELSE
              CALL flagerror("Not implemented.",err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error)
    RETURN 1
    LOGICAL, INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set all values for a logical PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(cmiss_matrix)) THEN
      domain_mapping=>cmiss_matrix%DISTRIBUTED_MATRIX%ROW_DOMAIN_MAPPING
      IF(ASSOCIATED(domain_mapping)) THEN
        IF(domain_mapping%NUMBER_OF_DOMAINS==1) THEN
          CALL matrix_create_finish(cmiss_matrix%MATRIX,err,error,*999)
        ELSE
          & domain_mapping%ADJACENT_DOMAINS_PTR(domain_mapping%NUMBER_OF_DOMAINS)
        ENDIF
      ELSE
        CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH")
    RETURN
999 CALL DISTRIBUTED_MATRIX_CMISS_FINALISE(cmiss_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error,*999)

    IF(ASSOCIATED(cmiss_matrix)) THEN
      CALL matrix_destroy(cmiss_matrix%MATRIX,err,error,*999)
      DEALLOCATE(cmiss_matrix)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CMISS_FINALISE")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
        CALL flagerror("CMISS is already associated for this distributed matrix.",err,error,*998)
      ELSE
        row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
        column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
        IF(ASSOCIATED(row_domain_mapping)) THEN
          IF(ASSOCIATED(column_domain_mapping)) THEN
            ALLOCATE(distributed_matrix%CMISS,stat=err)
            IF(err/=0) CALL flagerror("Could not allocate CMISS distributed matrix.",err,error,*999)
            distributed_matrix%CMISS%DISTRIBUTED_MATRIX=>distributed_matrix
            NULLIFY(distributed_matrix%CMISS%MATRIX)
            SELECT CASE(distributed_matrix%GHOSTING_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_INCLUDE_GHOSTS_TYPE)
              CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
                & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
            CASE(DISTRIBUTED_MATRIX_VECTOR_NO_GHOSTS_TYPE)
              CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
                & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
            CASE DEFAULT
              local_error="The distributed matrix ghosting type of "// &
                & trim(numbertovstring(distributed_matrix%GHOSTING_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
          ENDIF
        ELSE
          CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CMISS_INITIALISE")
    RETURN
999 IF(ASSOCIATED(distributed_matrix%CMISS)) &
      & CALL DISTRIBUTED_MATRIX_CMISS_FINALISE(distributed_matrix%CMISS,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*998)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          CALL DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH(distributed_matrix%CMISS,err,error,*999)
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH(distributed_matrix%PETSC,err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
        distributed_matrix%MATRIX_FINISHED=.true.
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CREATE_FINISH")
    RETURN
999 CALL DISTRIBUTED_MATRIX_FINALISE(distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CREATE_START",err,error,*998)

    IF(ASSOCIATED(row_domain_mapping)) THEN
      IF(ASSOCIATED(column_domain_mapping)) THEN
        IF(ASSOCIATED(distributed_matrix)) THEN
          CALL flagerror("Distributed matrix is already associated.",err,error,*998)
        ELSE
          IF(row_domain_mapping%NUMBER_OF_DOMAINS==column_domain_mapping%NUMBER_OF_DOMAINS) THEN
            CALL DISTRIBUTED_MATRIX_INITIALISE(row_domain_mapping,column_domain_mapping,distributed_matrix,err,error,*999)
          ELSE
            local_error="The number of domains in the row domain mapping ("// &
              & trim(numbertovstring(row_domain_mapping%NUMBER_OF_DOMAINS,"*",err,error))// &
              & ") does not match the number of domains in the column domain mapping ("// &
              & trim(numbertovstring(column_domain_mapping%NUMBER_OF_DOMAINS,"*",err,error))//")."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ENDIF
      ELSE
        CALL flagerror("Column domain mapping is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Row domain mapping is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CREATE_START")
    RETURN
999 CALL DISTRIBUTED_MATRIX_FINALISE(distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_START",err,error)
    RETURN 1
    INTEGER(INTG), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL flagerror("Cannot get data for an integer PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error)
    RETURN 1
    REAL(SP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL flagerror("Cannot get data for a single precision PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error)
    RETURN 1
    REAL(DP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
                  CALL petsc_matdensegetarrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
                  CALL petsc_matseqaijgetarrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
                  CALL petsc_matdensegetarrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
                  CALL petsc_matseqaijgetarrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ENDIF
              SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
              CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
                temp_me = c_loc(petscdata(1,1))
                CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%M*distributed_matrix%PETSC%N])
              CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
                CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
                CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
                CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
                temp_me = c_loc(petscdata(1,1))
                CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%NUMBER_NON_ZEROS])
              CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
                CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
                CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE DEFAULT
                local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                  & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                CALL flagerror(local_error,err,error,*999)
              END SELECT
            ELSE
              CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
            ENDIF
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error)
    RETURN 1
    LOGICAL, POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_GET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL flagerror("Cannot get data for a logical PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_L",err,error)
    RETURN 1
    INTEGER(INTG), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            NULLIFY(data)
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL flagerror("Cannot restore data for an integer PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error)
    RETURN 1
    REAL(SP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            NULLIFY(data)
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL flagerror("Cannot restore data for a single precision PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error)
    RETURN 1
    REAL(DP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            NULLIFY(data)
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
              SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
              CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
                temp_me = c_loc(DATA(1))
                CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
              CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
                CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
                CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
                CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
                temp_me = c_loc(DATA(1))
                CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
              CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
                CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
                CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE DEFAULT
                local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                  & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                CALL flagerror(local_error,err,error,*999)
              END SELECT
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
                  CALL petsc_matdenserestorearrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
                  CALL petsc_matseqaijrestorearrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
                  CALL petsc_matdenserestorearrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
                  CALL petsc_matseqaijrestorearrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ENDIF
            ELSE
              CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
            ENDIF
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error)
    RETURN 1
    LOGICAL, POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            NULLIFY(data)
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL flagerror("Cannot restore data for a logical PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: dataType
    INTEGER(INTG), INTENT(OUT) :: err

    enters("DistributedMatrix_DataTypeGet",err,error,*999)

    IF(ASSOCIATED(matrix)) THEN
      IF(.NOT.matrix%matrix_finished) THEN
        CALL flag_error("The matrix has not been finished.",err,error,*999)
      ELSE
        datatype=matrix%data_type
      ENDIF
    ELSE
      CALL flag_error("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DistributedMatrix_DataTypeGet")
    RETURN
999 errorsexits("DistributedMatrix_DataTypeGet",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(IN) :: DATA_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*999)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_data_type_set(distributed_matrix%CMISS%MATRIX,data_type,err,error,*999)
            distributed_matrix%DATA_TYPE=data_type
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          SELECT CASE(data_type)
          CASE(MATRIX_VECTOR_INTG_TYPE)
            CALL flagerror("An integer distributed PETSc matrix is not implemented.",err,error,*999)
          CASE(MATRIX_VECTOR_SP_TYPE)
            CALL flagerror("A single precision distributed PETSc matrix is not implemented.",err,error,*999)
          CASE(MATRIX_VECTOR_DP_TYPE)
            distributed_matrix%DATA_TYPE=data_type
          CASE(MATRIX_VECTOR_L_TYPE)
            CALL flagerror("A logical distributed PETSc matrix is not implemented.",err,error,*999)
          CASE DEFAULT
            local_error="The specified data type of "//trim(numbertovstring(data_type,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_TYPE_SET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: m
    INTEGER(INTG), INTENT(OUT) :: n
    INTEGER(INTG), INTENT(OUT) :: err

    enters("DistributedMatrix_DimensionsGet",err,error,*999)

    IF(ASSOCIATED(distributedmatrix)) THEN
      SELECT CASE(distributedmatrix%library_type)
      CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
        IF(ASSOCIATED(distributedmatrix%cmiss)) THEN
          matrix=>distributedmatrix%cmiss%matrix
          IF(ASSOCIATED(matrix)) THEN
            IF(.NOT.matrix%matrix_finished) THEN
              CALL flag_error("The matrix has not been finished.",err,error,*999)
            ELSE
              m=matrix%m
              n=matrix%n
            ENDIF
          ELSE
            CALL flag_error("Distributed matrix CMISS matrix is not associated.",err,error,*999)
          ENDIF
        ELSE
          CALL flag_error("Distributed matrix CMISS is not associated.",err,error,*999)
        ENDIF
      CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
        petscmatrix=>distributedmatrix%petsc
        IF(ASSOCIATED(petscmatrix)) THEN
          m=petscmatrix%m
          n=petscmatrix%n
        ELSE
          CALL flag_error("Distributed matrix PETSc is not associated.",err,error,*999)
        ENDIF
      CASE DEFAULT
        localerror="The distributed matrix library type of "// &
          & trim(numbertovstring(distributedmatrix%library_type,"*",err,error))//" is invalid."
        CALL flag_error(localerror,err,error,*999)
      END SELECT
    ELSE
      CALL flag_error("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DistributedMatrix_DimensionsGet")
    RETURN
999 errorsexits("DistributedMatrix_DimensionsGet",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DESTROY",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      CALL DISTRIBUTED_MATRIX_FINALISE(distributed_matrix,err,error,*999)
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DESTROY")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DESTROY",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_DUPLICATE",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(new_distributed_matrix)) THEN
        CALL flagerror("New distributed matrix is already associated.",err,error,*998)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL DISTRIBUTED_MATRIX_CREATE_START(distributed_matrix%ROW_DOMAIN_MAPPING, &
              & distributed_matrix%COLUMN_DOMAIN_MAPPING,new_distributed_matrix,err,error,*999)
            CALL matrix_duplicate(distributed_matrix%CMISS%MATRIX,new_distributed_matrix%CMISS%MATRIX,err,error,*999)
            CALL DISTRIBUTED_MATRIX_CREATE_FINISH(new_distributed_matrix,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL DISTRIBUTED_MATRIX_CREATE_START(distributed_matrix%ROW_DOMAIN_MAPPING, &
            & distributed_matrix%COLUMN_DOMAIN_MAPPING,new_distributed_matrix,err,error,*999)
          CALL DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET(new_distributed_matrix,DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE,err,error,*999)
          CALL DISTRIBUTED_MATRIX_CREATE_FINISH(new_distributed_matrix,err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DUPLICATE")
    RETURN
999 CALL DISTRIBUTED_MATRIX_FINALISE(new_distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_DUPLICATE",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_FINALISE",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      CALL DISTRIBUTED_MATRIX_CMISS_FINALISE(distributed_matrix%CMISS,err,error,*999)
      CALL DISTRIBUTED_MATRIX_PETSC_FINALISE(distributed_matrix%PETSC,err,error,*999)
      DEALLOCATE(distributed_matrix)
    ENDIF

    exits("DISTRIBUTED_MATRIX_FINALISE")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_FINALISE",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: column_idx,row

    enters("DISTRIBUTED_MATRIX_FORM",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL flagerror("Not implemented.",err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          petsc_matrix=>distributed_matrix%PETSC
          IF(ASSOCIATED(petsc_matrix)) THEN
            SELECT CASE(petsc_matrix%STORAGE_TYPE)
            CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
              CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
              CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
              CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
              IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
                DO row=1,petsc_matrix%M
                  DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
                    CALL petsc_matsetvalue(petsc_matrix%OVERRIDE_MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
                      & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
                      & err,error,*999)
                  ENDDO !column_idx
                ENDDO !row
              ELSE
                DO row=1,petsc_matrix%M
                  DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
                    CALL petsc_matsetvalue(petsc_matrix%MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
                      & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
                      & err,error,*999)
                  ENDDO !column_idx
                ENDDO !row
              ENDIF
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
              CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
              CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE DEFAULT
              local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
                & " is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
            IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
              CALL petsc_matassemblybegin(petsc_matrix%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
              CALL petsc_matassemblyend(petsc_matrix%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
            ELSE
              CALL petsc_matassemblybegin(petsc_matrix%MATRIX,petsc_mat_final_assembly,err,error,*999)
              CALL petsc_matassemblyend(petsc_matrix%MATRIX,petsc_mat_final_assembly,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_FORM")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_FORM",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(IN) :: GHOSTING_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has already been finished.",err,error,*999)
      ELSE
        row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
        column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
        IF(ASSOCIATED(row_domain_mapping)) THEN
          IF(ASSOCIATED(column_domain_mapping)) THEN
            SELECT CASE(distributed_matrix%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
                SELECT CASE(ghosting_type)
                CASE(DISTRIBUTED_MATRIX_VECTOR_INCLUDE_GHOSTS_TYPE)
                  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
                    & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
                CASE(DISTRIBUTED_MATRIX_VECTOR_NO_GHOSTS_TYPE)
                  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
                    & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
                CASE DEFAULT
                  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
                SELECT CASE(ghosting_type)
                CASE(DISTRIBUTED_MATRIX_VECTOR_INCLUDE_GHOSTS_TYPE)
                  distributed_matrix%PETSC%N=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL
                CASE(DISTRIBUTED_MATRIX_VECTOR_NO_GHOSTS_TYPE)
                  distributed_matrix%PETSC%N=row_domain_mapping%NUMBER_OF_LOCAL
                CASE DEFAULT
                  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
              ENDIF
            CASE DEFAULT
              local_error="The distributed matrix library type of "// &
                & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
            distributed_matrix%GHOSTING_TYPE=ghosting_type
          ELSE
            CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
          ENDIF
        ELSE
          CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(IN) :: LIBRARY_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR,OLD_LIBRARY_TYPE

    enters("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has already been finished.",err,error,*998)
      ELSE
        old_library_type=distributed_matrix%LIBRARY_TYPE
        IF(library_type/=old_library_type) THEN
          SELECT CASE(library_type)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            CALL DISTRIBUTED_MATRIX_CMISS_INITIALISE(distributed_matrix,err,error,*999)
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL DISTRIBUTED_MATRIX_PETSC_INITIALISE(distributed_matrix,err,error,*999)
          CASE DEFAULT
            local_error="The library type of "//trim(numbertovstring(library_type,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
          SELECT CASE(old_library_type)
          CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
            CALL DISTRIBUTED_MATRIX_CMISS_FINALISE(distributed_matrix%CMISS,err,error,*999)
          CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
            CALL DISTRIBUTED_MATRIX_PETSC_FINALISE(distributed_matrix%PETSC,err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(old_library_type,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
          distributed_matrix%LIBRARY_TYPE=library_type
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET")
    RETURN
999 SELECT CASE(library_type)
    CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
      CALL DISTRIBUTED_MATRIX_CMISS_FINALISE(distributed_matrix%CMISS,dummy_err,dummy_error,*998)
    CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
      CALL DISTRIBUTED_MATRIX_PETSC_FINALISE(distributed_matrix%PETSC,dummy_err,dummy_error,*998)
    END SELECT
998 errorsexits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_INITIALISE",err,error,*998)

    IF(ASSOCIATED(row_domain_mapping)) THEN
      IF(ASSOCIATED(column_domain_mapping)) THEN
        IF(ASSOCIATED(distributed_matrix)) THEN
          CALL flagerror("Distributed matrix is already associated.",err,error,*998)
        ELSE
          ALLOCATE(distributed_matrix,stat=err)
          IF(err/=0) CALL flagerror("Could not allocate the distributed matrix.",err,error,*999)
          distributed_matrix%MATRIX_FINISHED=.false.
          distributed_matrix%LIBRARY_TYPE=0
          distributed_matrix%ROW_DOMAIN_MAPPING=>row_domain_mapping
          distributed_matrix%COLUMN_DOMAIN_MAPPING=>column_domain_mapping
          NULLIFY(distributed_matrix%CMISS)
          NULLIFY(distributed_matrix%PETSC)
        ENDIF
      ELSE
        CALL flagerror("Column domain mapping is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Row domain mapping is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_INITIALISE")
    RETURN
999 CALL DISTRIBUTED_MATRIX_FINALISE(distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_INITIALISE",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: MAX_COLUMNS_PER_ROW
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_max_columns_per_row_get(distributed_matrix%CMISS%MATRIX,max_columns_per_row,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            max_columns_per_row=distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(IN) :: NUMBER_NON_ZEROS
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*999)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_number_non_zeros_set(distributed_matrix%CMISS%MATRIX,number_non_zeros,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(number_non_zeros>0) THEN
              distributed_matrix%PETSC%NUMBER_NON_ZEROS=number_non_zeros
            ELSE
              local_error="The specified number of non zeros ("//trim(numbertovstring(number_non_zeros,"*",err,error))// &
                & ") is invalid. The number must be > 0."
              CALL flagerror(local_error,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: NUMBER_NON_ZEROS
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_number_non_zeros_get(distributed_matrix%CMISS%MATRIX,number_non_zeros,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            number_non_zeros=distributed_matrix%PETSC%NUMBER_NON_ZEROS
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix is not finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*999)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            distributed_matrix%PETSC%list=>list
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_LINKLIST_SET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            list=>distributed_matrix%PETSC%list
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix is not finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_LINKLIST_GET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(IN) :: ID
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_OUTPUT",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_output(id,distributed_matrix%CMISS%MATRIX,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
              CALL petsc_matview(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_viewer_stdout_world,err,error,*999)
            ELSE
              CALL petsc_matview(distributed_matrix%PETSC%MATRIX,petsc_viewer_stdout_world,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_OUTPUT")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_OUTPUT",err,error)
    RETURN 1
    TYPE(petscmattype), INTENT(IN) :: OVERRIDE_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          CALL flagerror("Not implemented.",err,error,*999)
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.true.
            distributed_matrix%PETSC%OVERRIDE_MATRIX=override_matrix
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          CALL flagerror("Not implemented.",err,error,*999)
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR,i

    enters("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error,*999)

    IF(ASSOCIATED(petsc_matrix)) THEN
      distributed_matrix=>petsc_matrix%DISTRIBUTED_MATRIX
      IF(ASSOCIATED(distributed_matrix)) THEN
        row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
        column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
        IF(ASSOCIATED(row_domain_mapping)) THEN
          IF(ASSOCIATED(column_domain_mapping)) THEN
            SELECT CASE(petsc_matrix%STORAGE_TYPE)
            CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
              petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M*petsc_matrix%GLOBAL_N
              petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%GLOBAL_N
              petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
              ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
              IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
              DO i=1,petsc_matrix%M
                petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
              ENDDO !i
              ALLOCATE(petsc_matrix%DATA_DP(petsc_matrix%DATA_SIZE),stat=err)
              IF(err/=0) CALL flagerror("Could not allocate PETSc matrix data.",err,error,*999)
              CALL petsc_matcreatedense(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
                & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_matrix%DATA_DP,petsc_matrix%MATRIX,err,error,*999)
            CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
              petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M
              petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=1
              petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
              ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
              IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
              DO i=1,petsc_matrix%M
                petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
              ENDDO !i
              ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%N),stat=err)
              IF(err/=0) CALL flagerror("Could not allocate diagonal number of non zeros.",err,error,*999)
              ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%N),stat=err)
              IF(err/=0) CALL flagerror("Could not allocate off diagonal number of non zeros.",err,error,*999)
              petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=1
              petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
              CALL petsc_matcreateaij(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
                & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
                & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
            CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
              CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
              CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
              IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) THEN
                IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) THEN
                  CALL petsc_matcreateaij(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
                    & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
                    & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
                  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_location_err,.true.,err,error,*999)
                  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_allocation_err,.true.,err,error,*999)
                  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_unused_nonzero_location_err,.true.,err,error,*999)
                  ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
                  IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.", &
                    & err,error,*999)
                  petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=0
                  DO i=1,petsc_matrix%M
                    petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
                    IF((petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i))>petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW) &
                      & petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i)
                  ENDDO !i
                ELSE
                  CALL flagerror("Matrix off diagonal storage locations have not been set.",err,error,*999)
                ENDIF
              ELSE
                CALL flagerror("Matrix diagonal storage locations have not been set.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
              CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
              CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE DEFAULT
              local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
                & " is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("PETSc matrix distributed matrix column domain mapping is not associated.",err,error,*999)
          ENDIF
        ELSE
          CALL flagerror("PETSc matrix distributed matrix row domain mapping is not associated.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("PETSc matrix distributed matrix is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH")
    RETURN
999 CALL DISTRIBUTED_MATRIX_PETSC_FINALISE(petsc_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error,*999)

    IF(ASSOCIATED(petsc_matrix)) THEN
      IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
      IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
      IF(ALLOCATED(petsc_matrix%ROW_INDICES)) DEALLOCATE(petsc_matrix%ROW_INDICES)
      IF(ALLOCATED(petsc_matrix%COLUMN_INDICES)) DEALLOCATE(petsc_matrix%COLUMN_INDICES)
      IF(ALLOCATED(petsc_matrix%GLOBAL_ROW_NUMBERS)) DEALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS)
      CALL petsc_matfinalise(petsc_matrix%MATRIX,err,error,*999)
      CALL petsc_matfinalise(petsc_matrix%OVERRIDE_MATRIX,err,error,*999)
      DEALLOCATE(petsc_matrix)
    ENDIF

    exits("DISTRIBUTED_MATRIX_PETSC_FINALISE")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
        CALL flagerror("PETSc is already associated for this distributed matrix.",err,error,*998)
      ELSE
        row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
        column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
        IF(ASSOCIATED(row_domain_mapping)) THEN
          IF(ASSOCIATED(column_domain_mapping)) THEN
            ALLOCATE(distributed_matrix%PETSC,stat=err)
            IF(err/=0) CALL flagerror("Could not allocate PETSc distributed matrix.",err,error,*999)
            distributed_matrix%PETSC%DISTRIBUTED_MATRIX=>distributed_matrix
            SELECT CASE(distributed_matrix%GHOSTING_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_INCLUDE_GHOSTS_TYPE)
              distributed_matrix%PETSC%M=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL
            CASE(DISTRIBUTED_MATRIX_VECTOR_NO_GHOSTS_TYPE)
              distributed_matrix%PETSC%M=row_domain_mapping%NUMBER_OF_LOCAL
            CASE DEFAULT
              local_error="The distributed matrix ghosting type of "// &
                & trim(numbertovstring(distributed_matrix%GHOSTING_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
            distributed_matrix%PETSC%N=column_domain_mapping%TOTAL_NUMBER_OF_LOCAL
            distributed_matrix%PETSC%GLOBAL_M=row_domain_mapping%NUMBER_OF_GLOBAL
            distributed_matrix%PETSC%GLOBAL_N=column_domain_mapping%NUMBER_OF_GLOBAL
            distributed_matrix%PETSC%DATA_SIZE=0
            distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW=0
            distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
          ELSE
            CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
          ENDIF
        ELSE
          CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_PETSC_INITIALISE")
    RETURN
999 IF(ASSOCIATED(distributed_matrix%PETSC)) &
      & CALL DISTRIBUTED_MATRIX_PETSC_FINALISE(distributed_matrix%PETSC,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error)
    RETURN 1
    INTEGER(INTG), POINTER :: ROW_INDICES(:)
    INTEGER(INTG), POINTER :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          cmiss_matrix=>distributed_matrix%CMISS
          IF(ASSOCIATED(cmiss_matrix)) THEN
            CALL matrix_storage_locations_get(cmiss_matrix%MATRIX,row_indices,column_indices,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          petsc_matrix=>distributed_matrix%PETSC
          IF(ASSOCIATED(petsc_matrix)) THEN
            SELECT CASE(petsc_matrix%STORAGE_TYPE)
            CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
              CALL flagerror("Cannot get matrix locations for a block storage matrix.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
              CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
              CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
              CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
              row_indices=>distributed_matrix%PETSC%ROW_INDICES
              column_indices=>distributed_matrix%PETSC%COLUMN_INDICES
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
              CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
              CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE DEFAULT
              local_error="The matrix storage type of "// &
                & trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: i,j,k,global_row_start,global_row_finish

    NULLIFY(cmiss_matrix)
    NULLIFY(petsc_matrix)

    enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*999)
      ELSE
        row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
        column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
        IF(ASSOCIATED(row_domain_mapping)) THEN
          IF(ASSOCIATED(column_domain_mapping)) THEN
            SELECT CASE(distributed_matrix%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              cmiss_matrix=>distributed_matrix%CMISS
              IF(ASSOCIATED(cmiss_matrix)) THEN
                CALL matrix_storage_locations_set(cmiss_matrix%MATRIX,row_indices,column_indices,err,error,*999)
              ELSE
                CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              petsc_matrix=>distributed_matrix%PETSC
              IF(ASSOCIATED(petsc_matrix)) THEN
                SELECT CASE(petsc_matrix%STORAGE_TYPE)
                CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
                  IF(SIZE(row_indices,1)==petsc_matrix%M+1) THEN
                    IF(SIZE(column_indices,1)==petsc_matrix%NUMBER_NON_ZEROS) THEN
                      IF(row_indices(1)==1) THEN
                        IF(row_indices(petsc_matrix%M+1)==petsc_matrix%NUMBER_NON_ZEROS+1) THEN
                          DO i=2,petsc_matrix%M+1
                            IF(row_indices(i)<row_indices(i-1)) THEN
                              local_error="Invalid row indices. Row index "//trim(numbertovstring(i,"*",err,error))// &
                                & " ("//trim(numbertovstring(row_indices(i),"*",err,error))// &
                                & ") is less than the previous row index ("// &
                                & trim(numbertovstring(row_indices(i-1),"*",err,error))//")."
                              CALL flagerror(local_error,err,error,*999)
                            ENDIF
                          ENDDO !i
                          ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
                          IF(err/=0) CALL flagerror("Could not allocate PETSc matrix diagonal number of non zeros.", &
                            & err,error,*999)
                          petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=0
                          ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
                          IF(err/=0) CALL flagerror("Could not allocate PETSc matrix off diagonal number of non zeros.", &
                            & err,error,*999)
                          petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
                          ALLOCATE(petsc_matrix%ROW_INDICES(petsc_matrix%M+1),stat=err)
                          IF(err/=0) CALL flagerror("Could not allocate PETSc matrix row indices.",err,error,*999)
                          petsc_matrix%ROW_INDICES(1:petsc_matrix%M+1)=row_indices(1:petsc_matrix%M+1)
                          ALLOCATE(petsc_matrix%COLUMN_INDICES(petsc_matrix%NUMBER_NON_ZEROS),stat=err)
                          IF(err/=0) CALL flagerror("Could not allocate PETSc matrix column indices.",err,error,*999)
                          petsc_matrix%COLUMN_INDICES(1:petsc_matrix%NUMBER_NON_ZEROS)= &
                            & column_indices(1:petsc_matrix%NUMBER_NON_ZEROS)
                          global_row_start=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(1)
                          global_row_finish=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(petsc_matrix%M)
                          DO i=1,petsc_matrix%M
                            DO j=row_indices(i),row_indices(i+1)-1
                              k=column_indices(j)
                              IF(k>0) THEN
                                IF(k>petsc_matrix%GLOBAL_N) THEN
                                  local_error="Invalid column indices. Column index "// &
                                    & trim(numbertovstring(j,"*",err,error))// &
                                    & " ("//trim(numbertovstring(k,"*",err,error))// &
                                    & ") is greater than the number of columns ("// &
                                    & trim(numbertovstring(petsc_matrix%GLOBAL_N,"*",err,error))//")."
                                  CALL flagerror(local_error,err,error,*999)
                                ENDIF
                                IF(k>=global_row_start.AND.k<=global_row_finish) THEN
                                  petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)+1
                                ELSE
                                  petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)+1
                                ENDIF
                              ELSE
                                local_error="Invalid column indices. Column index "//trim(numbertovstring(j,"*",err,error))// &
                                  & " ("//trim(numbertovstring(k,"*",err,error))//") is less than one."
                                CALL flagerror(local_error,err,error,*999)
                              ENDIF
                            ENDDO !j
                            IF(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)==0) petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=1
                          ENDDO !i
                          IF(DIAGNOSTICS1) THEN
                            CALL WRITE_STRING_VALUE(DIAGNOSTIC_OUTPUT_TYPE,"Number of non zeros = ",petsc_matrix% &
                              & number_non_zeros,err,error,*999)
                            CALL WRITE_STRING_VECTOR(DIAGNOSTIC_OUTPUT_TYPE,1,1,petsc_matrix%M,8,8,petsc_matrix% &
                              & diagonal_number_non_zeros,'(" Diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
                              & err,error,*999)
                            CALL WRITE_STRING_VECTOR(DIAGNOSTIC_OUTPUT_TYPE,1,1,petsc_matrix%M,8,8,petsc_matrix% &
                              & offdiagonal_number_non_zeros,'(" Off-diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
                              & err,error,*999)
                            CALL WRITE_STRING_VECTOR(DIAGNOSTIC_OUTPUT_TYPE,1,1,petsc_matrix%M+1,8,8,petsc_matrix% &
                              & row_indices,'(" Row indices :",8(X,I10))','(33X,8(X,I10))', &
                              & err,error,*999)
                            CALL WRITE_STRING_VECTOR(DIAGNOSTIC_OUTPUT_TYPE,1,1,petsc_matrix%NUMBER_NON_ZEROS,8,8,petsc_matrix% &
                              & column_indices,'(" Column indices :",8(X,I10))','(33X,8(X,I10))', &
                              & err,error,*999)
                          ENDIF
                        ELSE
                          local_error="Invalid row indices. The last row index ("// &
                            & trim(numbertovstring(row_indices(petsc_matrix%M+1),"*",err,error))// &
                            & ") does not equal the number of non-zeros + 1 ("// &
                            & trim(numbertovstring(petsc_matrix%NUMBER_NON_ZEROS+1,"*",err,error))//")."
                          CALL flagerror(local_error,err,error,*999)
                        ENDIF
                      ELSE
                        local_error="Invalid row indices. The first row index ("// &
                          & trim(numbertovstring(row_indices(1),"*",err,error))//") does not equal 1."
                        CALL flagerror(local_error,err,error,*999)
                      ENDIF
                    ELSE
                      local_error="The supplied number of column indices ("// &
                        & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
                        & ") does not match the number of non-zeros in the matrix ("// &
                        & trim(numbertovstring(petsc_matrix%NUMBER_NON_ZEROS,"*",err,error))//")."
                      CALL flagerror(local_error,err,error,*999)
                    ENDIF
                  ELSE
                    local_error="The supplied number of row indices ("// &
                      & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
                      & ") does not match the number of rows in the matrix + 1 ("// &
                      & trim(numbertovstring(petsc_matrix%M+1,"*",err,error))//")."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The specified matrix storage type of "// &
                    & trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
              ENDIF
            CASE DEFAULT
              local_error="The distributed matrix library type of "// &
                & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
          ENDIF
        ELSE
          CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET")
    RETURN
999 IF(ASSOCIATED(petsc_matrix)) THEN
      IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
      IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
    ENDIF
    errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error)
    RETURN 1
    INTEGER(INTG), INTENT(OUT) :: STORAGE_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_storage_type_get(distributed_matrix%CMISS%MATRIX,storage_type,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            storage_type=distributed_matrix%PETSC%STORAGE_TYPE
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error)
    RETURN 1
  !>Sets/changes the storage type of a distributed matrix. This must be done before the matrix is finished.
  SUBROUTINE DISTRIBUTED_MATRIX_STORAGE_TYPE_SET(DISTRIBUTED_MATRIX,STORAGE_TYPE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: STORAGE_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*999)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_storage_type_set(distributed_matrix%CMISS%MATRIX,storage_type,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            SELECT CASE(storage_type)
            CASE(DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE)
              distributed_matrix%PETSC%STORAGE_TYPE=DISTRIBUTED_MATRIX_BLOCK_STORAGE_TYPE
            CASE(DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE)
              distributed_matrix%PETSC%STORAGE_TYPE=DISTRIBUTED_MATRIX_DIAGONAL_STORAGE_TYPE
            CASE(DISTRIBUTED_MATRIX_COLUMN_MAJOR_STORAGE_TYPE)
              CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_MAJOR_STORAGE_TYPE)
              CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE)
              distributed_matrix%PETSC%STORAGE_TYPE=DISTRIBUTED_MATRIX_COMPRESSED_ROW_STORAGE_TYPE
            CASE(DISTRIBUTED_MATRIX_COMPRESSED_COLUMN_STORAGE_TYPE)
              CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE(DISTRIBUTED_MATRIX_ROW_COLUMN_STORAGE_TYPE)
              CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
            CASE DEFAULT
              local_error="The specified matrix storage type of "//trim(numbertovstring(storage_type,"*",err,error))// &
                & " is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_STORAGE_TYPE_SET
  !>Finishes the update procedure for a distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_FINISH(DISTRIBUTED_MATRIX,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          !Do nothing: a CMISS matrix is local, so there is no communication to finish
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
              CALL petsc_matassemblyend(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
            ELSE
              CALL petsc_matassemblyend(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_UPDATE_FINISH")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_FINISH
  !>Tests to see if the update procedure for a distributed matrix has finished.
  SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_ISFINISHED(DISTRIBUTED_MATRIX,ISFINISHED,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    LOGICAL, INTENT(OUT) :: ISFINISHED
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR

    enters("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error,*999)

    isfinished=.FALSE.
    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        !The update is currently synchronous, so it is always finished by this point
        isfinished=.TRUE.
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_ISFINISHED
  !>Waits until the update procedure for a distributed matrix has finished.
  SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED(DISTRIBUTED_MATRIX,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR

    enters("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        !Do nothing: the update is currently synchronous, so there is nothing to wait on
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED
  !>Starts the update procedure for a distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_START(DISTRIBUTED_MATRIX,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_UPDATE_START",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          !Do nothing: a CMISS matrix is local, so there is no communication to start
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
              CALL petsc_matassemblybegin(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
            ELSE
              CALL petsc_matassemblybegin(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_UPDATE_START")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_START",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_UPDATE_START
  !>Adds values to an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_INTG(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_INTG

  !>Adds one value to an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_INTG1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    INTEGER(INTG), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_INTG1

  !>Adds a block of values to an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_INTG2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_INTG2

  !>Adds values to a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_SP(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(SP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_SP

  !>Adds one value to a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_SP1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    REAL(SP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_SP1

  !>Adds a block of values to a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_SP2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(SP), INTENT(IN) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_SP2
  !>Adds values to a double precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_DP(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(DP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: i
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
              IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
                IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                  DO i=1,SIZE(row_indices,1)
                    !Add each value at its global row number and zero-based column
                    CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
                      & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
                      & err,error,*999)
                  ENDDO !i
                ELSE
                  DO i=1,SIZE(row_indices,1)
                    CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC% &
                      & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
                      & err,error,*999)
                  ENDDO !i
                ENDIF
              ELSE
                local_error="The size of the column indices array ("// &
                  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
                  & ") does not conform to the size of the values array ("// &
                  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
            ELSE
              local_error="The size of the row indices array ("// &
                & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
                & ") does not conform to the size of the values array ("// &
                & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
              CALL flagerror(local_error,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_DP
  !>Adds one value to a double precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_DP1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    REAL(DP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: PETSC_COL_INDEX(1)
    REAL(DP) :: PETSC_VALUE(1)
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            !PETSc columns are zero based
            petsc_col_index(1)=column_index-1
            petsc_value(1)=VALUE
            IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
              CALL petsc_matsetvalue(distributed_matrix%PETSC%OVERRIDE_MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
                & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999)
            ELSE
              CALL petsc_matsetvalue(distributed_matrix%PETSC%MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
                & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_DP1

  !>Adds a block of values to a double precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_DP2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(DP), INTENT(IN) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: GLOBAL_ROW_INDICES(SIZE(row_indices)),i
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
              IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
                !Translate the rows to PETSc global row numbers
                DO i=1,SIZE(row_indices,1)
                  global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
                ENDDO !i
                IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
                    & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999)
                ELSE
                  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
                    & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999)
                ENDIF
              ELSE
                local_error="The size of the column indices array ("// &
                  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
                  & ") does not conform to the number of columns in the values array ("// &
                  & trim(numbertovstring(SIZE(values,2),"*",err,error))//")."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
            ELSE
              local_error="The size of the row indices array ("// &
                & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
                & ") does not conform to the number of rows in the values array ("// &
                & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
              CALL flagerror(local_error,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_DP2
  !>Adds values to a logical distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_L(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    LOGICAL, INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_L

  !>Adds one value to a logical distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_L1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    LOGICAL, INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_L1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_L1

  !>Adds a block of values to a logical distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_L2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    LOGICAL, INTENT(IN) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_ADD_L2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_ADD_L2
  !>Gets values from an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_INTG(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_INTG

  !>Gets one value from an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_INTG1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    INTEGER(INTG), INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_INTG1

  !>Gets a block of values from an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_INTG2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(OUT) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_INTG2

  !>Gets values from a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_SP(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(SP), INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_SP

  !>Gets one value from a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_SP1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    REAL(SP), INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_SP1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_SP1

  !>Gets a block of values from a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_SP2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(SP), INTENT(OUT) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_SP2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_SP2
  !>Gets values from a double precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_DP(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(DP), INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: i
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
              IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
                IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                  DO i=1,SIZE(row_indices,1)
                    !Get each value at its global row number and zero-based column
                    CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
                      & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i), &
                      & err,error,*999)
                  ENDDO !i
                ELSE
                  DO i=1,SIZE(row_indices,1)
                    CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
                      & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),err,error,*999)
                  ENDDO !i
                ENDIF
              ELSE
                local_error="The size of the column indices array ("// &
                  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
                  & ") does not conform to the size of the values array ("// &
                  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
            ELSE
              local_error="The size of the row indices array ("// &
                & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
                & ") does not conform to the size of the values array ("// &
                & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
              CALL flagerror(local_error,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_DP
  !>Gets one value from a double precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_DP1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    REAL(DP), INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: COLUMN_INDICES(1)
    REAL(DP) :: VALUES(1)
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            !PETSc columns are zero based
            column_indices(1)=column_index-1
            IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
              CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
                & global_row_numbers(row_index),1,column_indices,values,err,error,*999)
            ELSE
              CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_index), &
                & 1,column_indices,values,err,error,*999)
            ENDIF
            VALUE=values(1)
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_DP1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_DP1
  !>Gets a block of values from a double precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_DP2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(DP), INTENT(OUT) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: GLOBAL_ROW_INDICES(SIZE(row_indices,1)),i
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
              IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
                !Translate the rows to PETSc global row numbers
                DO i=1,SIZE(row_indices,1)
                  global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
                ENDDO !i
                IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                  CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
                    & SIZE(column_indices,1),column_indices-1,values,err,error,*999)
                ELSE
                  CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
                    & SIZE(column_indices,1),column_indices-1,values,err,error,*999)
                ENDIF
              ELSE
                local_error="The size of the column indices array ("// &
                  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
                  & ") does not conform to the number of columns in the values array ("// &
                  & trim(numbertovstring(SIZE(values,2),"*",err,error))//")."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
            ELSE
              local_error="The size of the row indices array ("// &
                & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
                & ") does not conform to the number of rows in the values array ("// &
                & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
              CALL flagerror(local_error,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_DP2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_DP2
  !>Gets values from a logical distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_L(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    LOGICAL, INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_L

  !>Gets one value from a logical distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_L1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    LOGICAL, INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_L1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_L1

  !>Gets a block of values from a logical distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_L2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    LOGICAL, INTENT(OUT) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_GET_L2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_GET_L2
  !>Sets values in an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_INTG(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_INTG

  !>Sets one value in an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_INTG1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    INTEGER(INTG), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_INTG1

  !>Sets a block of values in an integer distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_INTG2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_INTG2

  !>Sets values in a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_SP(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(SP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_SET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_SP

  !>Sets one value in a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_SP1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDEX
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
    REAL(SP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_SET_SP1")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_SP1

  !>Sets a block of values in a single precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_SP2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(SP), INTENT(IN) :: VALUES(:,:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_SET_SP2")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_SP2
  !>Sets values in a double precision distributed matrix.
  SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_DP(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
    TYPE(DISTRIBUTED_MATRIX_TYPE), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    REAL(DP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: i
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
              IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
                IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                  DO i=1,SIZE(row_indices,1)
                    !Insert each value at its global row number and zero-based column
                    CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
                      & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values, &
                      & err,error,*999)
                  ENDDO !i
                ELSE
                  DO i=1,SIZE(row_indices,1)
                    CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
                      & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values,err,error,*999)
                  ENDDO !i
                ENDIF
              ELSE
                local_error="The size of the column indices array ("// &
                  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
                  & ") does not conform to the size of the values array ("// &
                  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
            ELSE
              local_error="The size of the row indices array ("// &
                & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
                & ") does not conform to the size of the values array ("// &
                & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
              CALL flagerror(local_error,err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_VALUES_SET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error)
    RETURN 1
  END SUBROUTINE DISTRIBUTED_MATRIX_VALUES_SET_DP