#include "petscversion.h"

    INTEGER(INTG), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    CALL flagerror("Cannot set all values for an integer PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG")
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error)
    REAL(SP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    CALL flagerror("Cannot set all values for a single precision PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP")
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error)
    REAL(DP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    IF(abs(VALUE)<=zero_tolerance) THEN
    IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    CALL flagerror("Not implemented.",err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP")
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error)
    LOGICAL, INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    CALL flagerror("Cannot set all values for a logical PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L")
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(cmiss_matrix)) THEN
    domain_mapping=>cmiss_matrix%DISTRIBUTED_MATRIX%ROW_DOMAIN_MAPPING
    IF(ASSOCIATED(domain_mapping)) THEN
    IF(domain_mapping%NUMBER_OF_DOMAINS==1) THEN
      & domain_mapping%ADJACENT_DOMAINS_PTR(domain_mapping%NUMBER_OF_DOMAINS)
    CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH")
998 errorsexits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error,*999)

    IF(ASSOCIATED(cmiss_matrix)) THEN
    DEALLOCATE(cmiss_matrix)

    exits("DISTRIBUTED_MATRIX_CMISS_FINALISE")
999 errorsexits("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("CMISS is already associated for this distributed matrix.",err,error,*998)
    row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
    column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
    IF(ASSOCIATED(row_domain_mapping)) THEN
    IF(ASSOCIATED(column_domain_mapping)) THEN
    ALLOCATE(distributed_matrix%CMISS,stat=err)
    IF(err/=0) CALL flagerror("Could not allocate CMISS distributed matrix.",err,error,*999)
    distributed_matrix%CMISS%DISTRIBUTED_MATRIX=>distributed_matrix
    NULLIFY(distributed_matrix%CMISS%MATRIX)
    SELECT CASE(distributed_matrix%GHOSTING_TYPE)
    CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
      & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
    CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
      & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
    local_error="The distributed matrix ghosting type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
    CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
    CALL flagerror("Distributed matrix is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_CMISS_INITIALISE")
999 IF(ASSOCIATED(distributed_matrix%CMISS)) &
998 errorsexits("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    CALL flagerror("The distributed matrix has been finished.",err,error,*998)
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    distributed_matrix%MATRIX_FINISHED=.true.
    CALL flagerror("Distributed matrix is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_CREATE_FINISH")
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_CREATE_START",err,error,*998)

    IF(ASSOCIATED(row_domain_mapping)) THEN
    IF(ASSOCIATED(column_domain_mapping)) THEN
    IF(ASSOCIATED(distributed_matrix)) THEN
    CALL flagerror("Distributed matrix is already associated.",err,error,*998)
    IF(row_domain_mapping%NUMBER_OF_DOMAINS==column_domain_mapping%NUMBER_OF_DOMAINS) THEN
    local_error="The number of domains in the row domain mapping ("// &
      & ") does not match the number of domains in the column domain mapping ("// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Column domain mapping is not associated.",err,error,*999)
    CALL flagerror("Row domain mapping is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_CREATE_START")
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_START",err,error)
    INTEGER(INTG), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    CALL flagerror("Data is already associated.",err,error,*999)
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    CALL flagerror("Cannot get data for an integer PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_GET_INTG")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error)
    REAL(SP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    CALL flagerror("Data is already associated.",err,error,*999)
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    CALL flagerror("Cannot get data for a single precision PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_GET_SP")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error)
    REAL(DP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    CALL flagerror("Data is already associated.",err,error,*999)
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
      & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
      & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
    temp_me = c_loc(petscdata(1,1))
    CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%M*distributed_matrix%PETSC%N])
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    temp_me = c_loc(petscdata(1,1))
    CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%NUMBER_NON_ZEROS])
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
      & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_GET_DP")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error)
    LOGICAL, POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_GET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    CALL flagerror("Data is already associated.",err,error,*999)
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    CALL flagerror("Cannot get data for a logical PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_GET_L")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_L",err,error)
    INTEGER(INTG), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    CALL flagerror("Cannot restore data for an integer PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Data is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error)
    REAL(SP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    CALL flagerror("Cannot restore data for a single precision PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Data is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error)
    REAL(DP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
    temp_me = c_loc(DATA(1))
    CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    temp_me = c_loc(DATA(1))
    CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
      & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
      & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
      & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Data is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error)
    LOGICAL, POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(data)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    CALL flagerror("Cannot restore data for a logical PETSc distributed matrix.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Data is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_L")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error)
    INTEGER(INTG), INTENT(OUT) :: dataType
    INTEGER(INTG), INTENT(OUT) :: err

    enters("DistributedMatrix_DataTypeGet",err,error,*999)

    IF(ASSOCIATED(matrix)) THEN
    IF(.NOT.matrix%matrix_finished) THEN
    CALL flag_error("The matrix has not been finished.",err,error,*999)
    datatype=matrix%data_type
    CALL flag_error("Distributed matrix is not associated.",err,error,*999)

    exits("DistributedMatrix_DataTypeGet")
999 errorsexits("DistributedMatrix_DataTypeGet",err,error)
    INTEGER(INTG), INTENT(IN) :: DATA_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    CALL flagerror("The distributed matrix has been finished.",err,error,*999)
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    distributed_matrix%DATA_TYPE=data_type
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    SELECT CASE(data_type)
    CALL flagerror("An integer distributed PETSc matrix is not implemented.",err,error,*999)
    CALL flagerror("A single precision distributed PETSc matrix is not implemented.",err,error,*999)
    CALL flagerror("A logical distributed PETSc matrix is not implemented.",err,error,*999)
    local_error="The specified data type of "//trim(numbertovstring(data_type,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DATA_TYPE_SET")
999 errorsexits("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error)
    INTEGER(INTG), INTENT(OUT) :: m
    INTEGER(INTG), INTENT(OUT) :: n
    INTEGER(INTG), INTENT(OUT) :: err

    enters("DistributedMatrix_DimensionsGet",err,error,*999)

    IF(ASSOCIATED(distributedmatrix)) THEN
    SELECT CASE(distributedmatrix%library_type)
    IF(ASSOCIATED(distributedmatrix%cmiss)) THEN
    matrix=>distributedmatrix%cmiss%matrix
    IF(ASSOCIATED(matrix)) THEN
    IF(.NOT.matrix%matrix_finished) THEN
    CALL flag_error("The matrix has not been finished.",err,error,*999)
    CALL flag_error("Distributed matrix CMISS matrix is not associated.",err,error,*999)
    CALL flag_error("Distributed matrix CMISS is not associated.",err,error,*999)
    petscmatrix=>distributedmatrix%petsc
    IF(ASSOCIATED(petscmatrix)) THEN
    CALL flag_error("Distributed matrix PETSc is not associated.",err,error,*999)
    localerror="The distributed matrix library type of "// &
    CALL flag_error("Distributed matrix is not associated.",err,error,*999)

    exits("DistributedMatrix_DimensionsGet")
999 errorsexits("DistributedMatrix_DimensionsGet",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_DESTROY",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_DESTROY")
999 errorsexits("DISTRIBUTED_MATRIX_DESTROY",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_DUPLICATE",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(new_distributed_matrix)) THEN
    CALL flagerror("New distributed matrix is already associated.",err,error,*998)
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
      & new_distributed_matrix,err,error,*999)
    CALL matrix_duplicate(distributed_matrix%CMISS%MATRIX,new_distributed_matrix%CMISS%MATRIX,err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
      & new_distributed_matrix,err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_DUPLICATE")
998 errorsexits("DISTRIBUTED_MATRIX_DUPLICATE",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_FINALISE",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    DEALLOCATE(distributed_matrix)

    exits("DISTRIBUTED_MATRIX_FINALISE")
999 errorsexits("DISTRIBUTED_MATRIX_FINALISE",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: column_idx,row

    enters("DISTRIBUTED_MATRIX_FORM",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Not implemented.",err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    petsc_matrix=>distributed_matrix%PETSC
    IF(ASSOCIATED(petsc_matrix)) THEN
    SELECT CASE(petsc_matrix%STORAGE_TYPE)
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
    DO row=1,petsc_matrix%M
    DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
    CALL petsc_matsetvalue(petsc_matrix%OVERRIDE_MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
      & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
    DO row=1,petsc_matrix%M
    DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
    CALL petsc_matsetvalue(petsc_matrix%MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
      & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
    local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
    CALL flagerror(local_error,err,error,*999)
    IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
    CALL petsc_matassemblyend(petsc_matrix%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_FORM")
999 errorsexits("DISTRIBUTED_MATRIX_FORM",err,error)
    INTEGER(INTG), INTENT(IN) :: GHOSTING_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    CALL flagerror("The distributed matrix has already been finished.",err,error,*999)
    row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
    column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
    IF(ASSOCIATED(row_domain_mapping)) THEN
    IF(ASSOCIATED(column_domain_mapping)) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    SELECT CASE(ghosting_type)
    CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
      & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
    CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
      & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
    local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    SELECT CASE(ghosting_type)
    distributed_matrix%PETSC%N=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL
    distributed_matrix%PETSC%N=row_domain_mapping%NUMBER_OF_LOCAL
    local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    distributed_matrix%GHOSTING_TYPE=ghosting_type
    CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET")
999 errorsexits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error)
    INTEGER(INTG), INTENT(IN) :: LIBRARY_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR,OLD_LIBRARY_TYPE

    enters("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    CALL flagerror("The distributed matrix has already been finished.",err,error,*998)
    old_library_type=distributed_matrix%LIBRARY_TYPE
    IF(library_type/=old_library_type) THEN
    SELECT CASE(library_type)
    local_error="The library type of "//trim(numbertovstring(library_type,"*",err,error))//" is invalid."
    CALL flagerror(local_error,err,error,*999)
    SELECT CASE(old_library_type)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    distributed_matrix%LIBRARY_TYPE=library_type
    CALL flagerror("Distributed matrix is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET")
999 SELECT CASE(library_type)
998 errorsexits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_INITIALISE",err,error,*998)

    IF(ASSOCIATED(row_domain_mapping)) THEN
    IF(ASSOCIATED(column_domain_mapping)) THEN
    IF(ASSOCIATED(distributed_matrix)) THEN
    CALL flagerror("Distributed matrix is already associated.",err,error,*998)
    ALLOCATE(distributed_matrix,stat=err)
    IF(err/=0) CALL flagerror("Could not allocate the distributed matrix.",err,error,*999)
    distributed_matrix%MATRIX_FINISHED=.false.
    distributed_matrix%LIBRARY_TYPE=0
    distributed_matrix%ROW_DOMAIN_MAPPING=>row_domain_mapping
    distributed_matrix%COLUMN_DOMAIN_MAPPING=>column_domain_mapping
    NULLIFY(distributed_matrix%CMISS)
    NULLIFY(distributed_matrix%PETSC)
    CALL flagerror("Column domain mapping is not associated.",err,error,*999)
    CALL flagerror("Row domain mapping is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_INITIALISE")
998 errorsexits("DISTRIBUTED_MATRIX_INITIALISE",err,error)
    INTEGER(INTG), INTENT(OUT) :: MAX_COLUMNS_PER_ROW
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    max_columns_per_row=distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET")
999 errorsexits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error)
    INTEGER(INTG), INTENT(IN) :: NUMBER_NON_ZEROS
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    CALL flagerror("The distributed matrix has been finished.",err,error,*999)
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    IF(number_non_zeros>0) THEN
    distributed_matrix%PETSC%NUMBER_NON_ZEROS=number_non_zeros
    local_error="The specified number of non zeros ("//trim(numbertovstring(number_non_zeros,"*",err,error))// &
      & ") is invalid. The number must be > 0."
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET")
999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error)
    INTEGER(INTG), INTENT(OUT) :: NUMBER_NON_ZEROS
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    number_non_zeros=distributed_matrix%PETSC%NUMBER_NON_ZEROS
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix is not finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET")
999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    CALL flagerror("The distributed matrix has been finished.",err,error,*999)
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    distributed_matrix%PETSC%list=>list
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_LINKLIST_SET")
999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    list=>distributed_matrix%PETSC%list
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix is not finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_LINKLIST_GET")
999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error)
    INTEGER(INTG), INTENT(IN) :: ID
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_OUTPUT",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL matrix_output(id,distributed_matrix%CMISS%MATRIX,err,error,*999)
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    CALL petsc_matview(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_viewer_stdout_world,err,error,*999)
    CALL petsc_matview(distributed_matrix%PETSC%MATRIX,petsc_viewer_stdout_world,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_OUTPUT")
999 errorsexits("DISTRIBUTED_MATRIX_OUTPUT",err,error)
    TYPE(petscmattype), INTENT(IN) :: OVERRIDE_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    CALL flagerror("Not implemented.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.true.
    distributed_matrix%PETSC%OVERRIDE_MATRIX=override_matrix
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON")
999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    CALL flagerror("Not implemented.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF")
999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR,i

    enters("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error,*999)

    IF(ASSOCIATED(petsc_matrix)) THEN
    distributed_matrix=>petsc_matrix%DISTRIBUTED_MATRIX
    IF(ASSOCIATED(distributed_matrix)) THEN
    row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
    column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
    IF(ASSOCIATED(row_domain_mapping)) THEN
    IF(ASSOCIATED(column_domain_mapping)) THEN
    SELECT CASE(petsc_matrix%STORAGE_TYPE)
    petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M*petsc_matrix%GLOBAL_N
    petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%GLOBAL_N
    petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
    ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
    DO i=1,petsc_matrix%M
    petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
    ALLOCATE(petsc_matrix%DATA_DP(petsc_matrix%DATA_SIZE),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate PETSc matrix data.",err,error,*999)
      & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_matrix%DATA_DP,petsc_matrix%MATRIX,err,error,*999)
    petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M
    petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=1
    petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
    ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
    DO i=1,petsc_matrix%M
    petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
    ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%N),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate diagonal number of non zeros.",err,error,*999)
    ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%N),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate off diagonal number of non zeros.",err,error,*999)
    petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=1
    petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
      & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
      & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) THEN
    IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) THEN
      & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
      & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
    CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_location_err,.true.,err,error,*999)
    CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_allocation_err,.true.,err,error,*999)
    CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_unused_nonzero_location_err,.true.,err,error,*999)
    ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
    petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=0
    DO i=1,petsc_matrix%M
    petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
    IF((petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i))>petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW) &
      & petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i)
    CALL flagerror("Matrix off diagonal storage locations have not been set.",err,error,*999)
    CALL flagerror("Matrix diagonal storage locations have not been set.",err,error,*999)
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
    local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("PETSc matrix distributed matrix column domain mapping is not associated.",err,error,*999)
    CALL flagerror("PETSc matrix distributed matrix row domain mapping is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH")
998 errorsexits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error,*999)

    IF(ASSOCIATED(petsc_matrix)) THEN
    IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
    IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
    IF(ALLOCATED(petsc_matrix%ROW_INDICES)) DEALLOCATE(petsc_matrix%ROW_INDICES)
    IF(ALLOCATED(petsc_matrix%COLUMN_INDICES)) DEALLOCATE(petsc_matrix%COLUMN_INDICES)
    IF(ALLOCATED(petsc_matrix%GLOBAL_ROW_NUMBERS)) DEALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS)
    DEALLOCATE(petsc_matrix)

    exits("DISTRIBUTED_MATRIX_PETSC_FINALISE")
999 errorsexits("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: DUMMY_ERR

    enters("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    CALL flagerror("PETSc is already associated for this distributed matrix.",err,error,*998)
    row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
    column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
    IF(ASSOCIATED(row_domain_mapping)) THEN
    IF(ASSOCIATED(column_domain_mapping)) THEN
    ALLOCATE(distributed_matrix%PETSC,stat=err)
    IF(err/=0) CALL flagerror("Could not allocate PETSc distributed matrix.",err,error,*999)
    distributed_matrix%PETSC%DISTRIBUTED_MATRIX=>distributed_matrix
    SELECT CASE(distributed_matrix%GHOSTING_TYPE)
    distributed_matrix%PETSC%M=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL
    distributed_matrix%PETSC%M=row_domain_mapping%NUMBER_OF_LOCAL
    local_error="The distributed matrix ghosting type of "// &
    CALL flagerror(local_error,err,error,*999)
    distributed_matrix%PETSC%N=column_domain_mapping%TOTAL_NUMBER_OF_LOCAL
    distributed_matrix%PETSC%GLOBAL_M=row_domain_mapping%NUMBER_OF_GLOBAL
    distributed_matrix%PETSC%GLOBAL_N=column_domain_mapping%NUMBER_OF_GLOBAL
    distributed_matrix%PETSC%DATA_SIZE=0
    distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW=0
    distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
    CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
    CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
    CALL flagerror("Distributed matrix is not associated.",err,error,*998)

    exits("DISTRIBUTED_MATRIX_PETSC_INITIALISE")
999 IF(ASSOCIATED(distributed_matrix%PETSC)) &
998 errorsexits("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error)
    INTEGER(INTG), POINTER :: ROW_INDICES(:)
    INTEGER(INTG), POINTER :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    cmiss_matrix=>distributed_matrix%CMISS
    IF(ASSOCIATED(cmiss_matrix)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    petsc_matrix=>distributed_matrix%PETSC
    IF(ASSOCIATED(petsc_matrix)) THEN
    SELECT CASE(petsc_matrix%STORAGE_TYPE)
    CALL flagerror("Cannot get matrix locations for a block storage matrix.",err,error,*999)
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    row_indices=>distributed_matrix%PETSC%ROW_INDICES
    column_indices=>distributed_matrix%PETSC%COLUMN_INDICES
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
    local_error="The matrix storage type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET")
999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error)
    INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
    INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: i,j,k,global_row_start,global_row_finish

    NULLIFY(cmiss_matrix)
    NULLIFY(petsc_matrix)

    enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    CALL flagerror("The distributed matrix has been finished.",err,error,*999)
    row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
    column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
    IF(ASSOCIATED(row_domain_mapping)) THEN
    IF(ASSOCIATED(column_domain_mapping)) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    cmiss_matrix=>distributed_matrix%CMISS
    IF(ASSOCIATED(cmiss_matrix)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    petsc_matrix=>distributed_matrix%PETSC
    IF(ASSOCIATED(petsc_matrix)) THEN
    SELECT CASE(petsc_matrix%STORAGE_TYPE)
    CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
    IF(SIZE(row_indices,1)==petsc_matrix%M+1) THEN
    IF(SIZE(column_indices,1)==petsc_matrix%NUMBER_NON_ZEROS) THEN
    IF(row_indices(1)==1) THEN
    IF(row_indices(petsc_matrix%M+1)==petsc_matrix%NUMBER_NON_ZEROS+1) THEN
    DO i=2,petsc_matrix%M+1
    IF(row_indices(i)<row_indices(i-1)) THEN
    CALL flagerror(local_error,err,error,*999)
    ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate PETSc matrix diagonal number of non zeros.",err,error,*999)
    petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=0
    ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate PETSc matrix off diagonal number of non zeros.", &
      & err,error,*999)
    petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
    ALLOCATE(petsc_matrix%ROW_INDICES(petsc_matrix%M+1),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate PETSc matrix row indices.",err,error,*999)
    petsc_matrix%ROW_INDICES(1:petsc_matrix%M+1)=row_indices(1:petsc_matrix%M+1)
    ALLOCATE(petsc_matrix%COLUMN_INDICES(petsc_matrix%NUMBER_NON_ZEROS),stat=err)
    IF(err/=0) CALL flagerror("Could not allocate PETSc matrix column indices.",err,error,*999)
    petsc_matrix%COLUMN_INDICES(1:petsc_matrix%NUMBER_NON_ZEROS)= &
      & column_indices(1:petsc_matrix%NUMBER_NON_ZEROS)
    global_row_start=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(1)
    global_row_finish=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(petsc_matrix%M)
    DO i=1,petsc_matrix%M
    DO j=row_indices(i),row_indices(i+1)-1
    IF(k>petsc_matrix%GLOBAL_N) THEN
    local_error="Invalid column indices. Column index "//trim(numbertovstring(j,"*",err,error))// &
      & ") is greater than the number of columns ("// &
    CALL flagerror(local_error,err,error,*999)
    IF(k>=global_row_start.AND.k<=global_row_finish) THEN
    petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)+1
    petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)+1
    local_error="Invalid column indices. Column index "//trim(numbertovstring(j,"*",err,error))// &
    CALL flagerror(local_error,err,error,*999)
    IF(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)==0) petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=1
      & number_non_zeros,err,error,*999)
      & diagonal_number_non_zeros,'(" Diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
      & offdiagonal_number_non_zeros,'(" Off-diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
      & row_indices,'(" Row indices :",8(X,I10))','(33X,8(X,I10))', &
      & column_indices,'(" Column indices :",8(X,I10))','(33X,8(X,I10))', &
    local_error="Invalid row indices. The last row index ("// &
      & ") does not equal the number of non-zeros + 1 ("// &
    CALL flagerror(local_error,err,error,*999)
    local_error="Invalid row indices. The first row index ("// &
    CALL flagerror(local_error,err,error,*999)
    local_error="The supplied number of column indices ("// &
      & ") does not match the number of non-zeros in the matrix ("// &
    CALL flagerror(local_error,err,error,*999)
    local_error="The supplied number of row indices ("// &
      & ") does not match the number of rows in the matrix + 1 ("// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
    CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
    local_error="The specified matrix storage type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET")
999 IF(ASSOCIATED(petsc_matrix)) THEN
    IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
    IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
    errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error)
    INTEGER(INTG), INTENT(OUT) :: STORAGE_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
    IF(distributed_matrix%MATRIX_FINISHED) THEN
    SELECT CASE(distributed_matrix%LIBRARY_TYPE)
    IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
    CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
    IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
    storage_type=distributed_matrix%PETSC%STORAGE_TYPE
    CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
    local_error="The distributed matrix library type of "// &
    CALL flagerror(local_error,err,error,*999)
    CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
    CALL flagerror("Distributed matrix is not associated.",err,error,*999)

    exits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET")
999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error)
3196 INTEGER(INTG),
INTENT(IN) :: STORAGE_TYPE
3197 INTEGER(INTG),
INTENT(OUT) :: ERR
3202 enters(
"DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error,*999)
3204 IF(
ASSOCIATED(distributed_matrix))
THEN 3205 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3206 CALL flagerror(
"The distributed matrix has been finished.",err,error,*999)
3208 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3210 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3213 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3216 IF(
ASSOCIATED(distributed_matrix%PETSC))
THEN 3217 SELECT CASE(storage_type)
3223 CALL flagerror(
"Column major storage is not implemented for PETSc matrices.",err,error,*999)
3225 CALL flagerror(
"Row major storage is not implemented for PETSc matrices.",err,error,*999)
3229 CALL flagerror(
"Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
3231 CALL flagerror(
"Row column storage is not implemented for PETSc matrices.",err,error,*999)
local_error="The specified matrix storage type of "//trim(numbertovstring(storage_type,"*",err,error))// &
CALL flagerror(local_error,err,error,*999)
CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3241 local_error=
"The distributed matrix library type of "// &
3243 CALL flagerror(local_error,err,error,*999)
3247 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3250 exits(
"DISTRIBUTED_MATRIX_STORAGE_TYPE_SET")
3252 999 errorsexits(
"DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error)
3265 INTEGER(INTG),
INTENT(OUT) :: ERR
3270 enters(
"DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error,*999)
3272 IF(
ASSOCIATED(distributed_matrix))
THEN 3273 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3274 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    CALL petsc_matassemblyend(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
  ELSE
    CALL petsc_matassemblyend(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
  ENDIF
ELSE
  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
3288 local_error=
"The distributed matrix library type of "// &
3290 CALL flagerror(local_error,err,error,*999)
3293 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3296 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3299 exits(
"DISTRIBUTED_MATRIX_UPDATE_FINISH")
3301 999 errorsexits(
"DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error)
3314 LOGICAL,
INTENT(OUT) :: ISFINISHED
3315 INTEGER(INTG),
INTENT(OUT) :: ERR
3319 enters(
"DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error,*999)
3322 IF(
ASSOCIATED(distributed_matrix))
THEN 3323 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3327 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3330 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3333 exits(
"DISTRIBUTED_MATRIX_UPDATE_ISFINISHED")
3335 999 errorsexits(
"DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error)
3348 INTEGER(INTG),
INTENT(OUT) :: ERR
3352 enters(
"DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error,*999)
3354 IF(
ASSOCIATED(distributed_matrix))
THEN 3355 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3358 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3361 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3364 exits(
"DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED")
3366 999 errorsexits(
"DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error)
3379 INTEGER(INTG),
INTENT(OUT) :: ERR
3384 enters(
"DISTRIBUTED_MATRIX_UPDATE_START",err,error,*999)
3386 IF(
ASSOCIATED(distributed_matrix))
THEN 3387 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3388 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    CALL petsc_matassemblybegin(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
  ELSE
    CALL petsc_matassemblybegin(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
  ENDIF
ELSE
  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
3402 local_error=
"The distributed matrix library type of "// &
3404 CALL flagerror(local_error,err,error,*999)
3407 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3410 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3413 exits(
"DISTRIBUTED_MATRIX_UPDATE_START")
3415 999 errorsexits(
"DISTRIBUTED_MATRIX_UPDATE_START",err,error)
3428 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
3429 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
3430 INTEGER(INTG),
INTENT(IN) :: VALUES(:)
3431 INTEGER(INTG),
INTENT(OUT) :: ERR
3436 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error,*999)
3438 IF(
ASSOCIATED(distributed_matrix))
THEN 3439 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3440 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3442 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3443 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3445 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3448 CALL flagerror(
"Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3450 local_error=
"The distributed matrix library type of "// &
3452 CALL flagerror(local_error,err,error,*999)
3455 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3458 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3461 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG")
3463 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error)
3476 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
3477 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
3478 INTEGER(INTG),
INTENT(IN) ::
VALUE 3479 INTEGER(INTG),
INTENT(OUT) :: ERR
3484 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error,*999)
3486 IF(
ASSOCIATED(distributed_matrix))
THEN 3487 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3488 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3490 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3491 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
3493 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3496 CALL flagerror(
"Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3498 local_error=
"The distributed matrix library type of "// &
3500 CALL flagerror(local_error,err,error,*999)
3503 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3506 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3509 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG1")
3511 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error)
3524 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
3525 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
3526 INTEGER(INTG),
INTENT(IN) :: VALUES(:,:)
3527 INTEGER(INTG),
INTENT(OUT) :: ERR
3532 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error,*999)
3534 IF(
ASSOCIATED(distributed_matrix))
THEN 3535 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3536 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3538 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3539 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3541 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3544 CALL flagerror(
"Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3546 local_error=
"The distributed matrix library type of "// &
3548 CALL flagerror(local_error,err,error,*999)
3551 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3554 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3557 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG2")
3559 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error)
3572 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
3573 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
3574 REAL(SP),
INTENT(IN) :: VALUES(:)
3575 INTEGER(INTG),
INTENT(OUT) :: ERR
3580 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error,*999)
3582 IF(
ASSOCIATED(distributed_matrix))
THEN 3583 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3584 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3586 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3587 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3589 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3592 CALL flagerror(
"Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3594 local_error=
"The distributed matrix library type of "// &
3596 CALL flagerror(local_error,err,error,*999)
3599 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3602 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3605 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP")
3607 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error)
3620 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
3621 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
3622 REAL(SP),
INTENT(IN) ::
VALUE 3623 INTEGER(INTG),
INTENT(OUT) :: ERR
3628 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error,*999)
3630 IF(
ASSOCIATED(distributed_matrix))
THEN 3631 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3632 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3634 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3635 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
3637 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3640 CALL flagerror(
"Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3642 local_error=
"The distributed matrix library type of "// &
3644 CALL flagerror(local_error,err,error,*999)
3647 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3650 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3653 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP1")
3655 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error)
3668 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
3669 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
3670 REAL(SP),
INTENT(IN) :: VALUES(:,:)
3671 INTEGER(INTG),
INTENT(OUT) :: ERR
3676 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error,*999)
3678 IF(
ASSOCIATED(distributed_matrix))
THEN 3679 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3680 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3682 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3683 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3685 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3688 CALL flagerror(
"Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3690 local_error=
"The distributed matrix library type of "// &
3692 CALL flagerror(local_error,err,error,*999)
3695 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3698 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3701 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP2")
3703 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error)
3716 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
3717 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
3718 REAL(DP),
INTENT(IN) :: VALUES(:)
3719 INTEGER(INTG),
INTENT(OUT) :: ERR
3725 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error,*999)
3727 IF(
ASSOCIATED(distributed_matrix))
THEN 3728 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3729 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3731 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3732 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3734 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
    IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
      IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
        DO i=1,SIZE(row_indices,1)
          CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
            & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
            & err,error,*999)
        ENDDO !i
      ELSE
        DO i=1,SIZE(row_indices,1)
          CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC% &
            & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
            & err,error,*999)
        ENDDO !i
      ENDIF
    ELSE
      local_error="The size of the column indices array ("// &
        & ") does not conform to the size of the values array ("// &
      CALL flagerror(local_error,err,error,*999)
    ENDIF
  ELSE
    local_error="The size of the row indices array ("// &
      & ") does not conform to the size of the values array ("// &
    CALL flagerror(local_error,err,error,*999)
  ENDIF
ELSE
  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
3773 local_error=
"The distributed matrix library type of "// &
3775 CALL flagerror(local_error,err,error,*999)
3778 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3781 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3784 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP")
3786 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error)
3799 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
3800 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
3801 REAL(DP),
INTENT(IN) ::
VALUE 3802 INTEGER(INTG),
INTENT(OUT) :: ERR
3805 INTEGER(INTG) :: PETSC_COL_INDEX(1)
3806 REAL(DP) :: PETSC_VALUE(1)
3809 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error,*999)
3811 IF(
ASSOCIATED(distributed_matrix))
THEN 3812 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3813 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3815 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3816 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
3818 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  petsc_col_index(1)=column_index-1
  petsc_value(1)=VALUE
  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    CALL petsc_matsetvalue(distributed_matrix%PETSC%OVERRIDE_MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
      & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999)
  ELSE
    CALL petsc_matsetvalue(distributed_matrix%PETSC%MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
      & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999)
  ENDIF
ELSE
  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
3836 local_error=
"The distributed matrix library type of "// &
3838 CALL flagerror(local_error,err,error,*999)
3841 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3844 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3847 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP1")
3849 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error)
3862 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
3863 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
3864 REAL(DP),
INTENT(IN) :: VALUES(:,:)
3865 INTEGER(INTG),
INTENT(OUT) :: ERR
3868 INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices)),i
3871 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error,*999)
3873 IF(
ASSOCIATED(distributed_matrix))
THEN 3874 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3875 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3877 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3878 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3880 CALL flagerror(
"Distributed matrix CMISS is not associated",err,error,*999)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
    IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
      DO i=1,SIZE(row_indices,1)
        global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
      ENDDO !i
      IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
        CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
          & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999)
      ELSE
        CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
          & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999)
      ENDIF
    ELSE
      local_error="The size of the column indices array ("// &
        & ") does not conform to the number of columns in the values array ("// &
      CALL flagerror(local_error,err,error,*999)
    ENDIF
  ELSE
    local_error="The size of the row indices array ("// &
      & ") does not conform to the number of rows in the values array ("// &
    CALL flagerror(local_error,err,error,*999)
  ENDIF
ELSE
  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
3914 local_error=
"The distributed matrix library type of "// &
3916 CALL flagerror(local_error,err,error,*999)
3919 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3922 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3925 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP2")
3927 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error)
3940 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
3941 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
3942 LOGICAL,
INTENT(IN) :: VALUES(:)
3943 INTEGER(INTG),
INTENT(OUT) :: ERR
3948 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error,*999)
3950 IF(
ASSOCIATED(distributed_matrix))
THEN 3951 IF(distributed_matrix%MATRIX_FINISHED)
THEN 3952 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3954 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 3955 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3957 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
3960 CALL flagerror(
"Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
3962 local_error=
"The distributed matrix library type of "// &
3964 CALL flagerror(local_error,err,error,*999)
3967 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
3970 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
3973 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_L")
3975 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error)
3988 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
3989 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
3990 LOGICAL,
INTENT(IN) ::
VALUE 3991 INTEGER(INTG),
INTENT(OUT) :: ERR
3996 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error,*999)
3998 IF(
ASSOCIATED(distributed_matrix))
THEN 3999 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4000 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4002 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4003 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
4005 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4008 CALL flagerror(
"Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
4010 local_error=
"The distributed matrix library type of "// &
4012 CALL flagerror(local_error,err,error,*999)
4015 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4018 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4021 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_L1")
4023 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error)
4036 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4037 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4038 LOGICAL,
INTENT(IN) :: VALUES(:,:)
4039 INTEGER(INTG),
INTENT(OUT) :: ERR
4044 enters(
"DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error,*999)
4046 IF(
ASSOCIATED(distributed_matrix))
THEN 4047 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4048 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4050 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4051 CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4053 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4056 CALL flagerror(
"Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
4058 local_error=
"The distributed matrix library type of "// &
4060 CALL flagerror(local_error,err,error,*999)
4063 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4066 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4069 exits(
"DISTRIBUTED_MATRIX_VALUES_ADD_L2")
4071 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error)
4084 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4085 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4086 INTEGER(INTG),
INTENT(OUT) :: VALUES(:)
4087 INTEGER(INTG),
INTENT(OUT) :: ERR
4092 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error,*999)
4094 IF(
ASSOCIATED(distributed_matrix))
THEN 4095 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4096 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4098 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4099 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4101 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4104 CALL flagerror(
"Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4106 local_error=
"The distributed matrix library type of "// &
4108 CALL flagerror(local_error,err,error,*999)
4111 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4114 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4117 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG")
4119 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error)
4132 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
4133 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
4134 INTEGER(INTG),
INTENT(OUT) ::
VALUE 4135 INTEGER(INTG),
INTENT(OUT) :: ERR
4140 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error,*999)
4142 IF(
ASSOCIATED(distributed_matrix))
THEN 4143 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4144 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4146 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4147 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
4149 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4152 CALL flagerror(
"Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4154 local_error=
"The distributed matrix library type of "// &
4156 CALL flagerror(local_error,err,error,*999)
4159 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4162 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4165 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG1")
4167 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error)
4180 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4181 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4182 INTEGER(INTG),
INTENT(OUT) :: VALUES(:,:)
4183 INTEGER(INTG),
INTENT(OUT) :: ERR
4188 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error,*999)
4190 IF(
ASSOCIATED(distributed_matrix))
THEN 4191 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4192 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4194 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4195 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4197 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4200 CALL flagerror(
"Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4202 local_error=
"The distributed matrix library type of "// &
4204 CALL flagerror(local_error,err,error,*999)
4207 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4210 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4213 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG2")
4215 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error)
4228 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4229 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4230 REAL(SP),
INTENT(OUT) :: VALUES(:)
4231 INTEGER(INTG),
INTENT(OUT) :: ERR
4236 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error,*999)
4238 IF(
ASSOCIATED(distributed_matrix))
THEN 4239 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4240 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4242 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4243 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4245 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4248 CALL flagerror(
"Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4250 local_error=
"The distributed matrix library type of "// &
4252 CALL flagerror(local_error,err,error,*999)
4255 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4258 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4261 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_SP")
4263 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error)
4276 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
4277 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
4278 REAL(SP),
INTENT(OUT) ::
VALUE 4279 INTEGER(INTG),
INTENT(OUT) :: ERR
4284 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error,*999)
4286 IF(
ASSOCIATED(distributed_matrix))
THEN 4287 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4288 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4290 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4291 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
4293 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4296 CALL flagerror(
"Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4298 local_error=
"The distributed matrix library type of "// &
4300 CALL flagerror(local_error,err,error,*999)
4303 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4306 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4309 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_SP1")
4311 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error)
4324 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4325 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4326 REAL(SP),
INTENT(OUT) :: VALUES(:,:)
4327 INTEGER(INTG),
INTENT(OUT) :: ERR
4332 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error,*999)
4334 IF(
ASSOCIATED(distributed_matrix))
THEN 4335 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4336 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4338 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4339 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4341 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4344 CALL flagerror(
"Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4346 local_error=
"The distributed matrix library type of "// &
4348 CALL flagerror(local_error,err,error,*999)
4351 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4354 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4357 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_SP2")
4359 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error)
4372 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4373 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4374 REAL(DP),
INTENT(OUT) :: VALUES(:)
4375 INTEGER(INTG),
INTENT(OUT) :: ERR
4381 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error,*999)
4383 IF(
ASSOCIATED(distributed_matrix))
THEN 4384 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4385 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4387 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4388 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4390 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4393 IF(
ASSOCIATED(distributed_matrix%PETSC))
THEN 4394 IF(
SIZE(row_indices,1)==
SIZE(values,1))
THEN 4395 IF(
SIZE(column_indices,1)==
SIZE(values,1))
THEN 4396 IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX)
THEN 4397 DO i=1,
SIZE(row_indices,1)
4398 CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
4399 & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i), &
4403 DO i=1,
SIZE(row_indices,1)
4404 CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
4405 & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),err,error,*999)
4409 local_error=
"The size of the column indices array ("// &
4411 &
") does not conform to the size of the values array ("// &
4413 CALL flagerror(local_error,err,error,*999)
4416 local_error=
"The size of the row indices array ("// &
4418 &
") does not conform to the size of the values array ("// &
4420 CALL flagerror(local_error,err,error,*999)
4423 CALL flagerror(
"Distributed matrix PETSc is not associated.",err,error,*999)
4426 local_error=
"The distributed matrix library type of "// &
4428 CALL flagerror(local_error,err,error,*999)
4431 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4434 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4437 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_DP")
4439 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error)
4452 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
4453 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
4454 REAL(DP),
INTENT(OUT) ::
VALUE 4455 INTEGER(INTG),
INTENT(OUT) :: ERR
4458 INTEGER(INTG) :: COLUMN_INDICES(1)
4459 REAL(DP) :: VALUES(1)
4462 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error,*999)
4464 IF(
ASSOCIATED(distributed_matrix))
THEN 4465 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4466 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4468 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4469 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
4471 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  column_indices(1)=column_index-1
  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
      & global_row_numbers(row_index),1,column_indices,values,err,error,*999)
  ELSE
    CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_index), &
      & 1,column_indices,values,err,error,*999)
  ENDIF
  VALUE=values(1)
ELSE
  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
4488 local_error=
"The distributed matrix library type of "// &
4490 CALL flagerror(local_error,err,error,*999)
4493 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4496 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4499 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_DP1")
4501 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error)
4515 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4516 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4517 REAL(DP),
INTENT(OUT) :: VALUES(:,:)
4518 INTEGER(INTG),
INTENT(OUT) :: ERR
4521 INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices,1)),i
4524 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error,*999)
4526 IF(
ASSOCIATED(distributed_matrix))
THEN 4527 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4528 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4530 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4531 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4533 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4536 CALL flagerror(
"Cannot get values for an integer precision PETSc distributed matrix.",err,error,*999)
4537 IF(
ASSOCIATED(distributed_matrix%PETSC))
THEN 4538 IF(
SIZE(row_indices,1)==
SIZE(values,1))
THEN 4539 IF(
SIZE(column_indices,1)==
SIZE(values,2))
THEN 4540 DO i=1,
SIZE(row_indices,1)
4541 global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
4543 IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX)
THEN 4544 CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,
SIZE(row_indices,1),global_row_indices, &
4545 &
SIZE(column_indices,1),column_indices-1,values,err,error,*999)
4547 CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,
SIZE(row_indices,1),global_row_indices, &
4548 &
SIZE(column_indices,1),column_indices-1,values,err,error,*999)
4551 local_error=
"The size of the column indices array ("// &
4553 &
") does not conform to the number of columns in the values array ("// &
4555 CALL flagerror(local_error,err,error,*999)
4558 local_error=
"The size of the row indices array ("// &
4560 &
") does not conform to the number of rows in the values array ("// &
4562 CALL flagerror(local_error,err,error,*999)
4565 CALL flagerror(
"Distributed matrix PETSc is not associated.",err,error,*999)
4568 local_error=
"The distributed matrix library type of "// &
4570 CALL flagerror(local_error,err,error,*999)
4573 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4576 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4579 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_DP2")
4581 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error)
4594 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4595 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4596 LOGICAL,
INTENT(OUT) :: VALUES(:)
4597 INTEGER(INTG),
INTENT(OUT) :: ERR
4602 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_L",err,error,*999)
4604 IF(
ASSOCIATED(distributed_matrix))
THEN 4605 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4606 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4608 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4609 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4611 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4614 CALL flagerror(
"Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4616 local_error=
"The distributed matrix library type of "// &
4618 CALL flagerror(local_error,err,error,*999)
4621 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4624 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4627 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_L")
4629 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_L",err,error)
4642 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
4643 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
4644 LOGICAL,
INTENT(OUT) ::
VALUE 4645 INTEGER(INTG),
INTENT(OUT) :: ERR
4650 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error,*999)
4652 IF(
ASSOCIATED(distributed_matrix))
THEN 4653 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4654 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4656 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4657 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
4659 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4662 CALL flagerror(
"Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4664 local_error=
"The distributed matrix library type of "// &
4666 CALL flagerror(local_error,err,error,*999)
4669 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4672 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4675 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_L1")
4677 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error)
4690 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4691 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4692 LOGICAL,
INTENT(OUT) :: VALUES(:,:)
4693 INTEGER(INTG),
INTENT(OUT) :: ERR
4698 enters(
"DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error,*999)
4700 IF(
ASSOCIATED(distributed_matrix))
THEN 4701 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4702 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4704 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4705 CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4707 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4710 CALL flagerror(
"Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4712 local_error=
"The distributed matrix library type of "// &
4714 CALL flagerror(local_error,err,error,*999)
4717 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4720 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4723 exits(
"DISTRIBUTED_MATRIX_VALUES_GET_L2")
4725 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error)
4738 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4739 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4740 INTEGER(INTG),
INTENT(IN) :: VALUES(:)
4741 INTEGER(INTG),
INTENT(OUT) :: ERR
4746 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error,*999)
4748 IF(
ASSOCIATED(distributed_matrix))
THEN 4749 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4750 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4752 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4753 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4755 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4758 CALL flagerror(
"Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4760 local_error=
"The distributed matrix library type of "// &
4762 CALL flagerror(local_error,err,error,*999)
4765 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4768 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4771 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG")
4773 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error)
4786 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
4787 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
4788 INTEGER(INTG),
INTENT(IN) ::
VALUE 4789 INTEGER(INTG),
INTENT(OUT) :: ERR
4794 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error,*999)
4796 IF(
ASSOCIATED(distributed_matrix))
THEN 4797 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4798 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4800 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4801 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
4803 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4806 CALL flagerror(
"Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4808 local_error=
"The distributed matrix library type of "// &
4810 CALL flagerror(local_error,err,error,*999)
4813 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4816 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4819 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG1")
4821 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error)
4834 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4835 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4836 INTEGER(INTG),
INTENT(IN) :: VALUES(:,:)
4837 INTEGER(INTG),
INTENT(OUT) :: ERR
4842 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error,*999)
4844 IF(
ASSOCIATED(distributed_matrix))
THEN 4845 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4846 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4848 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4849 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4851 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4854 CALL flagerror(
"Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4856 local_error=
"The distributed matrix library type of "// &
4858 CALL flagerror(local_error,err,error,*999)
4861 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4864 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4867 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG2")
4869 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error)
4882 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4883 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4884 REAL(SP),
INTENT(IN) :: VALUES(:)
4885 INTEGER(INTG),
INTENT(OUT) :: ERR
4890 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error,*999)
4892 IF(
ASSOCIATED(distributed_matrix))
THEN 4893 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4894 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4896 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4897 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4899 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4902 CALL flagerror(
"Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4904 local_error=
"The distributed matrix library type of "// &
4906 CALL flagerror(local_error,err,error,*999)
4909 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4912 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4915 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_SP")
4917 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error)
4930 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
4931 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
4932 REAL(SP),
INTENT(IN) ::
VALUE 4933 INTEGER(INTG),
INTENT(OUT) :: ERR
4938 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error,*999)
4940 IF(
ASSOCIATED(distributed_matrix))
THEN 4941 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4942 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4944 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4945 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
4947 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4950 CALL flagerror(
"Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4952 local_error=
"The distributed matrix library type of "// &
4954 CALL flagerror(local_error,err,error,*999)
4957 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
4960 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
4963 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_SP1")
4965 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error)
4978 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
4979 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
4980 REAL(SP),
INTENT(IN) :: VALUES(:,:)
4981 INTEGER(INTG),
INTENT(OUT) :: ERR
4986 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error,*999)
4988 IF(
ASSOCIATED(distributed_matrix))
THEN 4989 IF(distributed_matrix%MATRIX_FINISHED)
THEN 4990 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4992 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 4993 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4995 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
4998 CALL flagerror(
"Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
5000 local_error=
"The distributed matrix library type of "// &
5002 CALL flagerror(local_error,err,error,*999)
5005 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
5008 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
5011 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_SP2")
5013 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error)
5026 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
5027 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
5028 REAL(DP),
INTENT(IN) :: VALUES(:)
5029 INTEGER(INTG),
INTENT(OUT) :: ERR
5035 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error,*999)
5037 IF(
ASSOCIATED(distributed_matrix))
THEN 5038 IF(distributed_matrix%MATRIX_FINISHED)
THEN 5039 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5041 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 5042 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5044 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
5047 IF(
ASSOCIATED(distributed_matrix%PETSC))
THEN 5048 IF(
SIZE(row_indices,1)==
SIZE(values,1))
THEN 5049 IF(
SIZE(column_indices,1)==
SIZE(values,1))
THEN 5050 IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX)
THEN 5051 DO i=1,
SIZE(row_indices,1)
5052 CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
5053 & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values, &
5057 DO i=1,
SIZE(row_indices,1)
5058 CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
5059 & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values,err,error,*999)
5063 local_error=
"The size of the column indices array ("// &
5065 &
") does not conform to the size of the values array ("// &
5067 CALL flagerror(local_error,err,error,*999)
5070 local_error=
"The size of the row indices array ("// &
5072 &
") does not conform to the size of the values array ("// &
5074 CALL flagerror(local_error,err,error,*999)
5077 CALL flagerror(
"Distributed matrix PETSc is not associated.",err,error,*999)
5080 local_error=
"The distributed matrix library type of "// &
5082 CALL flagerror(local_error,err,error,*999)
5085 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
5088 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
5091 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_DP")
5093 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error)
5106 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
5107 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
5108 REAL(DP),
INTENT(IN) ::
VALUE 5109 INTEGER(INTG),
INTENT(OUT) :: ERR
5114 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_DP1",err,error,*999)
5116 IF(
ASSOCIATED(distributed_matrix))
THEN 5117 IF(distributed_matrix%MATRIX_FINISHED)
THEN 5118 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5120 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 5121 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
5123 CALL flagerror(
"Distributed matrix CMISS is not associated",err,error,*999)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
    CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
      & row_index),1,(/column_index-1/),(/VALUE/),petsc_insert_values,err,error,*999)
  ELSE
    CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_index), &
      & 1,(/column_index-1/),(/VALUE/),petsc_insert_values,err,error,*999)
  ENDIF
ELSE
  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
5138 local_error=
"The distributed matrix library type of "// &
5140 CALL flagerror(local_error,err,error,*999)
5143 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
5146 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
5149 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_DP1")
5151 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_DP1",err,error)
5164 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
5165 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
5166 REAL(DP),
INTENT(IN) :: VALUES(:,:)
5167 INTEGER(INTG),
INTENT(OUT) :: ERR
5170 INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices,1)),i
5173 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_DP2",err,error,*999)
5175 IF(
ASSOCIATED(distributed_matrix))
THEN 5176 IF(distributed_matrix%MATRIX_FINISHED)
THEN 5177 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5179 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 5180 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5182 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
    IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
      DO i=1,SIZE(row_indices,1)
        global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
      ENDDO !i
      IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
        CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
          & SIZE(column_indices,1),column_indices-1,values,petsc_insert_values,err,error,*999)
      ELSE
        CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
          & SIZE(column_indices,1),column_indices-1,values,petsc_insert_values,err,error,*999)
      ENDIF
    ELSE
      local_error="The size of the column indices array ("// &
        & ") does not conform to the number of columns in the values array ("// &
      CALL flagerror(local_error,err,error,*999)
    ENDIF
  ELSE
    local_error="The size of the row indices array ("// &
      & ") does not conform to the number of rows in the values array ("// &
    CALL flagerror(local_error,err,error,*999)
  ENDIF
ELSE
  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
ENDIF
5216 local_error=
"The distributed matrix library type of "// &
5218 CALL flagerror(local_error,err,error,*999)
5221 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
5224 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
5227 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_DP2")
5229 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_DP2",err,error)
5242 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
5243 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
5244 LOGICAL,
INTENT(IN) :: VALUES(:)
5245 INTEGER(INTG),
INTENT(OUT) :: ERR
5250 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_L",err,error,*999)
5252 IF(
ASSOCIATED(distributed_matrix))
THEN 5253 IF(distributed_matrix%MATRIX_FINISHED)
THEN 5254 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5256 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 5257 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5259 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
5262 CALL flagerror(
"Cannot set values for a logical PETSc distributed matrix.",err,error,*999)
5264 local_error=
"The distributed matrix library type of "// &
5266 CALL flagerror(local_error,err,error,*999)
5269 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
5272 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
5275 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_L")
5277 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_L",err,error)
5290 INTEGER(INTG),
INTENT(IN) :: ROW_INDEX
5291 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDEX
5292 LOGICAL,
INTENT(IN) ::
VALUE 5293 INTEGER(INTG),
INTENT(OUT) :: ERR
5298 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_L1",err,error,*999)
5300 IF(
ASSOCIATED(distributed_matrix))
THEN 5301 IF(distributed_matrix%MATRIX_FINISHED)
THEN 5302 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5304 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 5305 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,
VALUE,err,error,*999)
5307 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
5310 CALL flagerror(
"Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
5312 local_error=
"The distributed matrix library type of "// &
5314 CALL flagerror(local_error,err,error,*999)
5317 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
5320 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
5323 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_L1")
5325 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_L1",err,error)
5338 INTEGER(INTG),
INTENT(IN) :: ROW_INDICES(:)
5339 INTEGER(INTG),
INTENT(IN) :: COLUMN_INDICES(:)
5340 LOGICAL,
INTENT(IN) :: VALUES(:,:)
5341 INTEGER(INTG),
INTENT(OUT) :: ERR
5346 enters(
"DISTRIBUTED_MATRIX_VALUES_SET_L2",err,error,*999)
5348 IF(
ASSOCIATED(distributed_matrix))
THEN 5349 IF(distributed_matrix%MATRIX_FINISHED)
THEN 5350 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5352 IF(
ASSOCIATED(distributed_matrix%CMISS))
THEN 5353 CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5355 CALL flagerror(
"Distributed matrix CMISS is not associated.",err,error,*999)
5358 CALL flagerror(
"Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
5360 local_error=
"The distributed matrix library type of "// &
5362 CALL flagerror(local_error,err,error,*999)
5365 CALL flagerror(
"The distributed matrix has not been finished.",err,error,*999)
5368 CALL flagerror(
"Distributed matrix is not associated.",err,error,*999)
5371 exits(
"DISTRIBUTED_MATRIX_VALUES_SET_L2")
5373 999 errorsexits(
"DISTRIBUTED_MATRIX_VALUES_SET_L2",err,error)
5389 INTEGER(INTG),
INTENT(IN) :: ROW_SELECTION_TYPE
5390 REAL(DP),
INTENT(IN) :: ALPHA
5394 INTEGER(INTG),
INTENT(OUT) :: ERR
5397 INTEGER(INTG) :: column_idx,local_column,global_column,NUMBER_OF_COLUMNS,NUMBER_OF_ROWS,row,row_idx
5405 enters(
"DISTRIBUTED_MATRIX_BY_VECTOR_ADD",err,error,*999)
5407 IF(abs(alpha)>zero_tolerance)
THEN 5408 IF(
ASSOCIATED(distributed_matrix))
THEN 5409 IF(distributed_matrix%MATRIX_FINISHED)
THEN 5410 IF(
ASSOCIATED(distributed_vector))
THEN 5411 IF(distributed_vector%VECTOR_FINISHED)
THEN 5412 IF(
ASSOCIATED(distributed_product))
THEN 5413 IF(distributed_product%VECTOR_FINISHED)
THEN 5414 IF(distributed_matrix%LIBRARY_TYPE==distributed_vector%LIBRARY_TYPE)
THEN 5415 IF(distributed_matrix%LIBRARY_TYPE==distributed_product%LIBRARY_TYPE)
THEN 5416 column_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
5417 IF(
ASSOCIATED(column_mapping))
THEN 5418 row_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
5419 IF(
ASSOCIATED(row_mapping))
THEN 5420 IF(
ASSOCIATED(column_mapping,distributed_vector%DOMAIN_MAPPING))
THEN 5421 IF(
ASSOCIATED(row_mapping,distributed_product%DOMAIN_MAPPING))
THEN 5422 SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5424 cmiss_matrix=>distributed_matrix%CMISS
5425 IF(
ASSOCIATED(cmiss_matrix))
THEN 5426 matrix=>cmiss_matrix%MATRIX
5427 IF(
ASSOCIATED(matrix))
THEN 5428 cmiss_vector=>distributed_vector%CMISS
5429 IF(
ASSOCIATED(cmiss_vector))
THEN 5430 cmiss_product=>distributed_product%CMISS
5431 IF(
ASSOCIATED(cmiss_product))
THEN 5432 SELECT CASE(row_selection_type)
5434 number_of_rows=row_mapping%TOTAL_NUMBER_OF_LOCAL
5436 number_of_rows=row_mapping%NUMBER_OF_LOCAL
5438 local_error=
"The row selection type of "// &
5440 CALL flagerror(local_error,err,error,*999)
5442 number_of_columns=column_mapping%NUMBER_OF_GLOBAL
5443 IF(matrix%DATA_TYPE==distributed_vector%DATA_TYPE)
THEN 5444 IF(matrix%DATA_TYPE==distributed_product%DATA_TYPE)
THEN 5445 SELECT CASE(matrix%DATA_TYPE)
5447 CALL flagerror(
"Not implemented.",err,error,*999)
5449 CALL flagerror(
"Not implemented.",err,error,*999)
5451 SELECT CASE(matrix%STORAGE_TYPE)
! Full (block) storage: column-major dense data with leading dimension matrix%M.
DO row=1,number_of_rows
  sum=0.0_DP
  DO local_column=1,column_mapping%TOTAL_NUMBER_OF_LOCAL
    global_column=column_mapping%LOCAL_TO_GLOBAL_MAP(local_column)
    sum=sum+matrix%DATA_DP(row+(global_column-1)*matrix%M)*cmiss_vector%DATA_DP(local_column)
  ENDDO !local_column
  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
ENDDO !row
! Diagonal storage: only the diagonal entries are stored.
DO row=1,number_of_rows
  sum=matrix%DATA_DP(row)*cmiss_vector%DATA_DP(row)
  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
ENDDO !row
! Column-major storage with leading dimension matrix%MAX_M.
DO row=1,number_of_rows
  sum=0.0_DP
  DO local_column=1,column_mapping%TOTAL_NUMBER_OF_LOCAL
    global_column=column_mapping%LOCAL_TO_GLOBAL_MAP(local_column)
    sum=sum+matrix%DATA_DP(row+(global_column-1)*matrix%MAX_M)*cmiss_vector%DATA_DP(local_column)
  ENDDO !local_column
  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
ENDDO !row
! Row-major storage with row stride matrix%MAX_N.
DO row=1,number_of_rows
  sum=0.0_DP
  DO local_column=1,column_mapping%TOTAL_NUMBER_OF_LOCAL
    global_column=column_mapping%LOCAL_TO_GLOBAL_MAP(local_column)
    sum=sum+matrix%DATA_DP((row-1)*matrix%MAX_N+global_column)*cmiss_vector%DATA_DP(local_column)
  ENDDO !local_column
  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
ENDDO !row
! Compressed row storage: ROW_INDICES gives each row's extent in DATA_DP/COLUMN_INDICES.
DO row=1,number_of_rows
  sum=0.0_DP
  DO column_idx=matrix%ROW_INDICES(row),matrix%ROW_INDICES(row+1)-1
    global_column=matrix%COLUMN_INDICES(column_idx)
    local_column=column_mapping%GLOBAL_TO_LOCAL_MAP(global_column)%LOCAL_NUMBER(1)
    sum=sum+matrix%DATA_DP(column_idx)*cmiss_vector%DATA_DP(local_column)
  ENDDO !column_idx
  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
ENDDO !row
! Compressed column storage: COLUMN_INDICES gives each column's extent in DATA_DP/ROW_INDICES.
DO column_idx=1,number_of_columns
  DO row_idx=matrix%COLUMN_INDICES(column_idx),matrix%COLUMN_INDICES(column_idx+1)-1
    row=matrix%ROW_INDICES(row_idx)
    local_column=column_mapping%GLOBAL_TO_LOCAL_MAP(column_idx)%LOCAL_NUMBER(1)
    sum=matrix%DATA_DP(row_idx)*cmiss_vector%DATA_DP(local_column)
    cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
  ENDDO !row_idx
ENDDO !column_idx
5510 CALL flagerror(
"Not implemented.",err,error,*999)
5512 local_error=
"The matrix storage type of "// &
5515 CALL flagerror(local_error,err,error,*999)
5518 CALL flagerror(
"Not implemented.",err,error,*999)
5520 local_error=
"The distributed matrix vector data type of "// &
5522 CALL flagerror(local_error,err,error,*999)
5525 local_error=
"The distributed product vector data type of "// &
5527 &
" does not match the distributed matrix data type of "// &
5529 CALL flagerror(local_error,err,error,*999)
5532 local_error=
"The distributed vector data type of "// &
5534 &
" does not match the distributed matrix data type of "// &
5536 CALL flagerror(local_error,err,error,*999)
5539 CALL flagerror(
"Distributed product CMISS vector is not associated.",err,error,*999)
5542 CALL flagerror(
"Distributed vector CMISS vector is not associated.",err,error,*999)
5545 CALL flagerror(
"CMISS matrix matrix is not associated.",err,error,*999)
5548 CALL flagerror(
"Distrubuted matrix CMISS is not associated.",err,error,*999)
5551 CALL flagerror(
"Not implemented.",err,error,*999)
5553 local_error=
"The distributed matrix library type of "// &
5555 CALL flagerror(local_error,err,error,*999)
5558 CALL flagerror(
"The distributed matrix and the distributed product vector have different "// &
5559 &
"domain mappings.",err,error,*999)
5562 CALL flagerror(
"The distributed matrix and the distributed vector have different domain mappings.", &
5566 CALL flagerror(
"The distributed matrix row domain mapping is not associated.",err,error,*999)
5569 CALL flagerror(
"The distributed matrix column domain mapping is not associated.",err,error,*999)
5572 local_error=
"The distributed product vector library type of "// &
5574 &
" does not match the distributed matrix library type of "// &
5576 CALL flagerror(local_error,err,error,*999)
5579 local_error=
"The distributed vector library type of "// &
5581 &
" does not match the distributed matrix library type of "// &
5583 CALL flagerror(local_error,err,error,*999)
5586 CALL flagerror(
"The distributed product vector has not been finished.",err,error,*999)
5589 CALL flagerror(
"The distributed product vector is not associated.",err,error,*999)
5592 CALL flagerror(
"Distributed vector has not been finished.",err,error,*999)
5595 CALL flagerror(
"Distrubuted vector is not associated.",err,error,*999)
5598 CALL flagerror(
"Distributed matrix has not been finished.",err,error,*999)
5601 CALL flagerror(
"Distributed matrix is not associated",err,error,*999)
5604 exits(
"DISTRIBUTED_MATRIX_BY_VECTOR_ADD")
5606 999 errorsexits(
"DISTRIBUTED_MATRIX_BY_VECTOR_ADD",err,error)
5619 INTEGER(INTG),
INTENT(IN) ::
VALUE 5620 INTEGER(INTG),
INTENT(OUT) :: ERR
5625 enters(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_INTG",err,error,*999)
5627 IF(
ASSOCIATED(distributed_vector))
THEN 5628 IF(distributed_vector%VECTOR_FINISHED)
THEN 5630 SELECT CASE(distributed_vector%LIBRARY_TYPE)
5632 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 5633 distributed_vector%CMISS%DATA_INTG=
VALUE 5635 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
5638 CALL flagerror(
"Cannot get values for an integer PETSc distributed vector.",err,error,*999)
5640 local_error=
"The distributed vector library type of "// &
5642 CALL flagerror(local_error,err,error,*999)
5645 local_error=
"The data type of "//
trim(
numbertovstring(distributed_vector%DATA_TYPE,
"*",err,error))// &
5646 &
" does not correspond to the integer data type of the given value." 5647 CALL flagerror(local_error,err,error,*999)
5650 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
5653 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
5656 exits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_INTG")
5658 999 errorsexits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_INTG",err,error)
5671 REAL(SP),
INTENT(IN) ::
VALUE 5672 INTEGER(INTG),
INTENT(OUT) :: ERR
5677 enters(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_SP",err,error,*999)
5679 IF(
ASSOCIATED(distributed_vector))
THEN 5680 IF(distributed_vector%VECTOR_FINISHED)
THEN 5682 SELECT CASE(distributed_vector%LIBRARY_TYPE)
5684 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 5685 distributed_vector%CMISS%DATA_SP=
VALUE 5687 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
5690 CALL flagerror(
"Cannot get values for a single precision PETSc distributed vector.",err,error,*999)
5692 local_error=
"The distributed vector library type of "// &
5694 CALL flagerror(local_error,err,error,*999)
5697 local_error=
"The data type of "//
trim(
numbertovstring(distributed_vector%DATA_TYPE,
"*",err,error))// &
5698 &
" does not correspond to the single precision data type of the given value." 5699 CALL flagerror(local_error,err,error,*999)
5702 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
5705 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
5708 exits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_SP")
5710 999 errorsexits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_SP",err,error)
5723 REAL(DP),
INTENT(IN) ::
VALUE 5724 INTEGER(INTG),
INTENT(OUT) :: ERR
5729 enters(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_DP",err,error,*999)
5731 IF(
ASSOCIATED(distributed_vector))
THEN 5732 IF(distributed_vector%VECTOR_FINISHED)
THEN 5734 SELECT CASE(distributed_vector%LIBRARY_TYPE)
5736 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 5737 distributed_vector%CMISS%DATA_DP=
VALUE 5739 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
5742 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 5743 IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 5744 CALL petsc_vecset(distributed_vector%PETSC%OVERRIDE_VECTOR,
VALUE,err,error,*999)
5746 CALL petsc_vecset(distributed_vector%PETSC%VECTOR,
VALUE,err,error,*999)
5749 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
5752 local_error=
"The distributed vector library type of "// &
5754 CALL flagerror(local_error,err,error,*999)
5757 local_error=
"The data type of "//
trim(
numbertovstring(distributed_vector%DATA_TYPE,
"*",err,error))// &
5758 &
" does not correspond to the double precision data type of the given value." 5759 CALL flagerror(local_error,err,error,*999)
5762 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
5765 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
5768 exits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_DP")
5770 999 errorsexits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_DP",err,error)
5783 LOGICAL,
INTENT(IN) ::
VALUE 5784 INTEGER(INTG),
INTENT(OUT) :: ERR
5789 enters(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_L",err,error,*999)
5791 IF(
ASSOCIATED(distributed_vector))
THEN 5792 IF(distributed_vector%VECTOR_FINISHED)
THEN 5794 SELECT CASE(distributed_vector%LIBRARY_TYPE)
5796 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 5797 distributed_vector%CMISS%DATA_L=
VALUE 5799 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
5802 CALL flagerror(
"Cannot get values for a logical PETSc distributed vector.",err,error,*999)
5804 local_error=
"The distributed vector library type of "// &
5806 CALL flagerror(local_error,err,error,*999)
5809 local_error=
"The data type of "//
trim(
numbertovstring(distributed_vector%DATA_TYPE,
"*",err,error))// &
5810 &
" does not correspond to the logical data type of the given value." 5811 CALL flagerror(local_error,err,error,*999)
5814 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
5817 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
5820 exits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_L")
5822 999 errorsexits(
"DISTRIBUTED_VECTOR_ALL_VALUES_SET_L",err,error)
5836 INTEGER(INTG),
INTENT(IN) :: ALPHA
5837 INTEGER(INTG),
INTENT(OUT) :: ERR
5842 enters(
"DISTRIBUTED_VECTOR_COPY_INTG",err,error,*999)
5844 IF(
ASSOCIATED(from_vector))
THEN 5845 IF(from_vector%VECTOR_FINISHED)
THEN 5846 IF(
ASSOCIATED(to_vector))
THEN 5847 IF(to_vector%VECTOR_FINISHED)
THEN 5848 IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE)
THEN 5850 IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE)
THEN 5852 SELECT CASE(from_vector%LIBRARY_TYPE)
5854 IF(
ASSOCIATED(from_vector%CMISS))
THEN 5855 IF(
ASSOCIATED(to_vector%CMISS))
THEN 5856 IF(
ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING))
THEN 5857 to_vector%CMISS%DATA_INTG(1:to_vector%CMISS%N)=alpha*from_vector%CMISS%DATA_INTG(1:from_vector%CMISS%N)
5859 CALL flagerror(
"The from vector does not have the same domain mapping as the to vector.",err,error,*999)
5862 CALL flagerror(
"To vector CMISS is not associated.",err,error,*999)
5865 CALL flagerror(
"From vector CMISS is not associated.",err,error,*999)
5868 CALL flagerror(
"Cannot copy a vector fro an integer PETSc distributed vector.",err,error,*999)
5870 local_error=
"The from vector library type of "// &
5872 CALL flagerror(local_error,err,error,*999)
5876 CALL flagerror(
"Not implemented.",err,error,*999)
5879 local_error=
"The from vector data type of "//
trim(
numbertovstring(from_vector%DATA_TYPE,
"*",err,error))// &
5880 &
" does not match the integer data type of the supplied alpha value." 5881 CALL flagerror(local_error,err,error,*999)
5884 local_error=
"The from vector data type of "// &
5886 &
" does not match the to vector data type of "// &
5888 CALL flagerror(local_error,err,error,*999)
5891 CALL flagerror(
"To vector has not been finished.",err,error,*999)
5894 CALL flagerror(
"To vector is not associated.",err,error,*999)
5897 CALL flagerror(
"From vector has not been finished.",err,error,*999)
5900 CALL flagerror(
"From vector is not associated.",err,error,*999)
5903 exits(
"DISTRIBUTED_VECTOR_COPY_INTG")
5905 999 errorsexits(
"DISTRIBUTED_VECTOR_COPY_INTG",err,error)
5920 REAL(DP),
INTENT(IN) :: ALPHA
5921 INTEGER(INTG),
INTENT(OUT) :: ERR
5926 enters(
"DISTRIBUTED_VECTOR_COPY_DP",err,error,*999)
5928 IF(
ASSOCIATED(from_vector))
THEN 5929 IF(from_vector%VECTOR_FINISHED)
THEN 5930 IF(
ASSOCIATED(to_vector))
THEN 5931 IF(to_vector%VECTOR_FINISHED)
THEN 5932 IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE)
THEN 5934 IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE)
THEN 5936 SELECT CASE(from_vector%LIBRARY_TYPE)
5938 IF(
ASSOCIATED(from_vector%CMISS))
THEN 5939 IF(
ASSOCIATED(to_vector%CMISS))
THEN 5940 IF(
ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING))
THEN 5941 to_vector%CMISS%DATA_DP(1:to_vector%CMISS%N)=alpha*from_vector%CMISS%DATA_DP(1:from_vector%CMISS%N)
5943 CALL flagerror(
"The from vector does not have the same domain mapping as the to vector.",err,error,*999)
5946 CALL flagerror(
"To vector CMISS is not associated.",err,error,*999)
5949 CALL flagerror(
"From vector CMISS is not associated.",err,error,*999)
5952 IF(
ASSOCIATED(from_vector%PETSC))
THEN 5953 IF(
ASSOCIATED(to_vector%PETSC))
THEN 5954 IF(from_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 5955 IF(to_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 5956 CALL petsc_veccopy(from_vector%PETSC%OVERRIDE_VECTOR,to_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
5957 CALL petsc_vecscale(to_vector%PETSC%OVERRIDE_VECTOR,alpha,err,error,*999)
5959 CALL petsc_veccopy(from_vector%PETSC%OVERRIDE_VECTOR,to_vector%PETSC%VECTOR,err,error,*999)
5963 IF(to_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 5964 CALL petsc_veccopy(from_vector%PETSC%VECTOR,to_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
5965 CALL petsc_vecscale(to_vector%PETSC%OVERRIDE_VECTOR,alpha,err,error,*999)
5967 CALL petsc_veccopy(from_vector%PETSC%VECTOR,to_vector%PETSC%VECTOR,err,error,*999)
5972 CALL flagerror(
"To vector PETSc is not associated.",err,error,*999)
5975 CALL flagerror(
"From vector PETSc is not associated.",err,error,*999)
5978 local_error=
"The from vector library type of "// &
5980 CALL flagerror(local_error,err,error,*999)
5984 CALL flagerror(
"Not implemented.",err,error,*999)
5987 local_error=
"The from vector data type of "//
trim(
numbertovstring(from_vector%DATA_TYPE,
"*",err,error))// &
5988 &
" does not match the double precision data type of the supplied alpha value." 5989 CALL flagerror(local_error,err,error,*999)
5992 local_error=
"The from vector data type of "// &
5994 &
" does not match the to vector data type of "// &
5996 CALL flagerror(local_error,err,error,*999)
5999 CALL flagerror(
"To vector has not been finished.",err,error,*999)
6002 CALL flagerror(
"To vector is not associated.",err,error,*999)
6005 CALL flagerror(
"From vector has not been finished.",err,error,*999)
6008 CALL flagerror(
"From vector is not associated.",err,error,*999)
6011 exits(
"DISTRIBUTED_VECTOR_COPY_DP")
6013 999 errorsexits(
"DISTRIBUTED_VECTOR_COPY_DP",err,error)
6028 REAL(SP),
INTENT(IN) :: ALPHA
6029 INTEGER(INTG),
INTENT(OUT) :: ERR
6034 enters(
"DISTRIBUTED_VECTOR_COPY_SP",err,error,*999)
6036 IF(
ASSOCIATED(from_vector))
THEN 6037 IF(from_vector%VECTOR_FINISHED)
THEN 6038 IF(
ASSOCIATED(to_vector))
THEN 6039 IF(to_vector%VECTOR_FINISHED)
THEN 6040 IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE)
THEN 6042 IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE)
THEN 6044 SELECT CASE(from_vector%LIBRARY_TYPE)
6046 IF(
ASSOCIATED(from_vector%CMISS))
THEN 6047 IF(
ASSOCIATED(to_vector%CMISS))
THEN 6048 IF(
ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING))
THEN 6049 to_vector%CMISS%DATA_SP(1:to_vector%CMISS%N)=alpha*from_vector%CMISS%DATA_SP(1:from_vector%CMISS%N)
6051 CALL flagerror(
"The from vector does not have the same domain mapping as the to vector.",err,error,*999)
6054 CALL flagerror(
"To vector CMISS is not associated.",err,error,*999)
6057 CALL flagerror(
"From vector CMISS is not associated.",err,error,*999)
6060 CALL flagerror(
"Cannot copy a vector for a single precision PETSc distributed vector.",err,error,*999)
6062 local_error=
"The from vector library type of "// &
6064 CALL flagerror(local_error,err,error,*999)
6068 CALL flagerror(
"Not implemented.",err,error,*999)
6071 local_error=
"The from vector data type of "//
trim(
numbertovstring(from_vector%DATA_TYPE,
"*",err,error))// &
6072 &
" does not match the single precision data type of the supplied alpha value." 6073 CALL flagerror(local_error,err,error,*999)
6076 local_error=
"The from vector data type of "// &
6078 &
" does not match the to vector data type of "// &
6080 CALL flagerror(local_error,err,error,*999)
6083 CALL flagerror(
"To vector has not been finished.",err,error,*999)
6086 CALL flagerror(
"To vector is not associated.",err,error,*999)
6089 CALL flagerror(
"From vector has not been finished.",err,error,*999)
6092 CALL flagerror(
"From vector is not associated.",err,error,*999)
6095 exits(
"DISTRIBUTED_VECTOR_COPY_SP")
6097 999 errorsexits(
"DISTRIBUTED_VECTOR_COPY_SP",err,error)
6112 LOGICAL,
INTENT(IN) :: ALPHA
6113 INTEGER(INTG),
INTENT(OUT) :: ERR
6118 enters(
"DISTRIBUTED_VECTOR_COPY_L",err,error,*999)
6120 IF(
ASSOCIATED(from_vector))
THEN 6121 IF(from_vector%VECTOR_FINISHED)
THEN 6122 IF(
ASSOCIATED(to_vector))
THEN 6123 IF(to_vector%VECTOR_FINISHED)
THEN 6124 IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE)
THEN 6126 IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE)
THEN 6128 SELECT CASE(from_vector%LIBRARY_TYPE)
6130 IF(
ASSOCIATED(from_vector%CMISS))
THEN 6131 IF(
ASSOCIATED(to_vector%CMISS))
THEN 6132 IF(
ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING))
THEN 6133 to_vector%CMISS%DATA_L(1:to_vector%CMISS%N)=alpha.AND.from_vector%CMISS%DATA_L(1:from_vector%CMISS%N)
6135 CALL flagerror(
"The from vector does not have the same domain mapping as the to vector.",err,error,*999)
6138 CALL flagerror(
"To vector CMISS is not associated.",err,error,*999)
6141 CALL flagerror(
"From vector CMISS is not associated.",err,error,*999)
6144 CALL flagerror(
"Cannot copy a vector for an integer PETSc distributed vector.",err,error,*999)
6146 local_error=
"The from vector library type of "// &
6148 CALL flagerror(local_error,err,error,*999)
6152 CALL flagerror(
"Not implemented.",err,error,*999)
6155 local_error=
"The from vector data type of "//
trim(
numbertovstring(from_vector%DATA_TYPE,
"*",err,error))// &
6156 &
" does not match the logical data type of the supplied alpha value." 6157 CALL flagerror(local_error,err,error,*999)
6160 local_error=
"The from vector data type of "// &
6162 &
" does not match the to vector data type of "// &
6164 CALL flagerror(local_error,err,error,*999)
6167 CALL flagerror(
"To vector has not been finished.",err,error,*999)
6170 CALL flagerror(
"To vector is not associated.",err,error,*999)
6173 CALL flagerror(
"From vector has not been finished.",err,error,*999)
6176 CALL flagerror(
"From vector is not associated.",err,error,*999)
6179 exits(
"DISTRIBUTED_VECTOR_COPY_L")
6181 999 errorsexits(
"DISTRIBUTED_VECTOR_COPY_L",err,error)
6195 INTEGER(INTG),
INTENT(OUT) :: ERR
6198 INTEGER(INTG) :: domain_idx
6200 enters(
"DISTRIBUTED_VECTOR_CMISS_FINALISE",err,error,*999)
6202 IF(
ASSOCIATED(cmiss_vector))
THEN 6203 IF(
ALLOCATED(cmiss_vector%DATA_INTG))
DEALLOCATE(cmiss_vector%DATA_INTG)
6204 IF(
ALLOCATED(cmiss_vector%DATA_SP))
DEALLOCATE(cmiss_vector%DATA_SP)
6205 IF(
ALLOCATED(cmiss_vector%DATA_DP))
DEALLOCATE(cmiss_vector%DATA_DP)
6206 IF(
ALLOCATED(cmiss_vector%DATA_L))
DEALLOCATE(cmiss_vector%DATA_L)
6207 IF(
ALLOCATED(cmiss_vector%TRANSFERS))
THEN 6208 DO domain_idx=1,
SIZE(cmiss_vector%TRANSFERS)
6211 DEALLOCATE(cmiss_vector%TRANSFERS)
6213 DEALLOCATE(cmiss_vector)
6216 exits(
"DISTRIBUTED_VECTOR_CMISS_FINALSE")
6218 999 errorsexits(
"DISTRIBUTED_VECTOR_CMISS_FINALISE",err,error)
6231 INTEGER(INTG),
INTENT(OUT) :: ERR
6234 INTEGER(INTG) :: DUMMY_ERR
6237 enters(
"DISTRIBUTED_VECTOR_CMISS_INITIALISE",err,error,*998)
6239 IF(
ASSOCIATED(distributed_vector))
THEN 6240 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 6241 CALL flagerror(
"CMISS is already associated for this distributed vector.",err,error,*998)
6243 IF(
ASSOCIATED(distributed_vector%DOMAIN_MAPPING))
THEN 6244 ALLOCATE(distributed_vector%CMISS,stat=err)
6245 IF(err/=0)
CALL flagerror(
"Could not allocated CMISS distributed vector.",err,error,*999)
6246 distributed_vector%CMISS%DISTRIBUTED_VECTOR=>distributed_vector
6249 distributed_vector%CMISS%BASE_TAG_NUMBER=0
6250 SELECT CASE(distributed_vector%GHOSTING_TYPE)
6252 distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
6254 distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
6256 local_error=
"The distributed vector ghosting type of "// &
6258 CALL flagerror(local_error,err,error,*999)
6260 distributed_vector%CMISS%DATA_SIZE=0
6262 CALL flagerror(
"Distributed vector domain mapping is not associated.",err,error,*998)
6266 CALL flagerror(
"Distributed vector is not associated.",err,error,*998)
6269 exits(
"DISTRIBUTED_VECTOR_CMISS_INITIALSE")
6271 999
IF(
ASSOCIATED(distributed_vector%CMISS)) &
6273 998 errorsexits(
"DISTRIBUTED_VECTOR_CMISS_INITIALISE",err,error)
6286 INTEGER(INTG),
INTENT(OUT) :: ERR
6289 INTEGER(INTG) :: domain_idx,domain_idx2,domain_no,DUMMY_ERR,my_computational_node_number
6295 enters(
"DISTRIBUTED_VECTOR_CMISS_CREATE_FINISH",err,error,*999)
6297 IF(
ASSOCIATED(cmiss_vector))
THEN 6298 distributed_vector=>cmiss_vector%DISTRIBUTED_VECTOR
6299 IF(
ASSOCIATED(distributed_vector))
THEN 6300 domain_mapping=>distributed_vector%DOMAIN_MAPPING
6301 IF(
ASSOCIATED(domain_mapping))
THEN 6302 cmiss_vector%DATA_SIZE=cmiss_vector%N
6303 SELECT CASE(distributed_vector%DATA_TYPE)
6305 ALLOCATE(cmiss_vector%DATA_INTG(cmiss_vector%DATA_SIZE),stat=err)
6306 IF(err/=0)
CALL flagerror(
"Could not allocate CMISS distributed vector integer data.",err,error,*999)
6308 ALLOCATE(cmiss_vector%DATA_SP(cmiss_vector%DATA_SIZE),stat=err)
6309 IF(err/=0)
CALL flagerror(
"Could not allocate CMISS distributed vector single precsion data.",err,error,*999)
6311 ALLOCATE(cmiss_vector%DATA_DP(cmiss_vector%DATA_SIZE),stat=err)
6312 IF(err/=0)
CALL flagerror(
"Could not allocate CMISS distributed vector double precsion data.",err,error,*999)
6314 ALLOCATE(cmiss_vector%DATA_L(cmiss_vector%DATA_SIZE),stat=err)
6315 IF(err/=0)
CALL flagerror(
"Could not allocate CMISS distributed vector logical data.",err,error,*999)
6317 local_error=
"The distributed vector data type of "// &
6319 CALL flagerror(local_error,err,error,*999)
6322 IF(domain_mapping%NUMBER_OF_DOMAINS==1)
THEN 6326 & domain_mapping%ADJACENT_DOMAINS_PTR(domain_mapping%NUMBER_OF_DOMAINS)
6328 IF(domain_mapping%NUMBER_OF_ADJACENT_DOMAINS>0)
THEN 6332 ALLOCATE(cmiss_vector%TRANSFERS(domain_mapping%NUMBER_OF_ADJACENT_DOMAINS),stat=err)
6333 IF(err/=0)
CALL flagerror(
"Could not allocate CMISS distributed vector transfer buffers.",err,error,*999)
6334 DO domain_idx=1,domain_mapping%NUMBER_OF_ADJACENT_DOMAINS
6336 cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE=domain_mapping%ADJACENT_DOMAINS(domain_idx)% &
6337 & number_of_send_ghosts
6338 cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE= &
6339 & domain_mapping%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
6340 cmiss_vector%TRANSFERS(domain_idx)%DATA_TYPE=distributed_vector%DATA_TYPE
6341 cmiss_vector%TRANSFERS(domain_idx)%SEND_TAG_NUMBER=cmiss_vector%BASE_TAG_NUMBER + &
6342 & domain_mapping%ADJACENT_DOMAINS_PTR(my_computational_node_number)+domain_idx-1
6343 domain_no=domain_mapping%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER
6345 DO domain_idx2=domain_mapping%ADJACENT_DOMAINS_PTR(domain_no),domain_mapping%ADJACENT_DOMAINS_PTR(domain_no+1)-1
6346 IF(domain_mapping%ADJACENT_DOMAINS_LIST(domain_idx2)==my_computational_node_number)
THEN 6352 domain_idx2=domain_idx2-domain_mapping%ADJACENT_DOMAINS_PTR(domain_no)+1
6353 cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER=cmiss_vector%BASE_TAG_NUMBER + &
6354 & domain_mapping%ADJACENT_DOMAINS_PTR(domain_no)+domain_idx2-1
6356 CALL flagerror(
"Could not find domain to set the receive tag number.",err,error,*999)
6358 SELECT CASE(distributed_vector%DATA_TYPE)
6360 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_INTG(cmiss_vector%TRANSFERS(domain_idx)% &
6361 & send_buffer_size),stat=err)
6362 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector send integer transfer buffer.",err,error,*999)
6363 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG(cmiss_vector%TRANSFERS(domain_idx)% &
6364 & receive_buffer_size),stat=err)
6365 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector receive integer transfer buffer.", &
6368 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SP(cmiss_vector%TRANSFERS(domain_idx)% &
6369 & send_buffer_size),stat=err)
6370 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector send single precision transfer buffer.", &
6372 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP(cmiss_vector%TRANSFERS(domain_idx)% &
6373 & receive_buffer_size),stat=err)
6374 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector receive single precision transfer buffer.", &
6377 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_DP(cmiss_vector%TRANSFERS(domain_idx)% &
6378 & send_buffer_size),stat=err)
6379 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector send double precision transfer buffer.", &
6381 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP(cmiss_vector%TRANSFERS(domain_idx)% &
6382 & receive_buffer_size),stat=err)
6383 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector receive double precision transfer buffer.", &
6386 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_L(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE), &
6388 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector send logical transfer buffer.",err,error,*999)
6389 ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L(cmiss_vector%TRANSFERS(domain_idx)% &
6390 & receive_buffer_size),stat=err)
6391 IF(err/=0)
CALL flagerror(
"Could not allocate distributed vector receive logical transfer buffer.", &
6394 local_error=
"The distributed vector data type of "// &
6396 CALL flagerror(local_error,err,error,*999)
6402 CALL flagerror(
"CMISS vector distributed vector domain mapping is not associated.",err,error,*999)
6405 CALL flagerror(
"CMISS vector distributed vector is not associated.",err,error,*999)
6408 CALL flagerror(
"CMISS vector is not associated.",err,error,*999)
6411 exits(
"DISTRIBUTED_VECTOR_CMISS_CREATE_FINISH")
6414 998 errorsexits(
"DISTRIBUTED_VECTOR_CMISS_CREATE_FINISH",err,error)
6427 INTEGER(INTG),
INTENT(OUT) :: ERR
6430 INTEGER(INTG) :: DUMMY_ERR
6433 enters(
"DISTRIBUTED_VECTOR_CREATE_FINISH",err,error,*999)
6435 IF(
ASSOCIATED(distributed_vector))
THEN 6436 IF(distributed_vector%VECTOR_FINISHED)
THEN 6437 CALL flagerror(
"The distributed vector has already been finished.",err,error,*999)
6439 SELECT CASE(distributed_vector%LIBRARY_TYPE)
6441 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 6444 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
6447 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 6450 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
6453 local_error=
"The distributed vector library type of "// &
6455 CALL flagerror(local_error,err,error,*999)
6457 distributed_vector%VECTOR_FINISHED=.true.
6461 exits(
"DISTRIBUTED_VECTOR_CREATE_FINISH")
6464 DEALLOCATE(distributed_vector)
6465 998 errorsexits(
"DISTRIBUTED_VECTOR_CREATE_FINISH",err,error)
6479 INTEGER(INTG),
INTENT(OUT) :: ERR
6482 INTEGER(INTG) :: DUMMY_ERR
6485 enters(
"DISTRIBUTED_VECTOR_CREATE_START",err,error,*998)
6487 IF(
ASSOCIATED(domain_mapping))
THEN 6488 IF(
ASSOCIATED(distributed_vector))
THEN 6489 CALL flagerror(
"Distributed vector is already associated.",err,error,*998)
6495 CALL flagerror(
"Domain mapping is not associated.",err,error,*998)
6498 exits(
"DISTRIBUTED_VECTOR_CREATE_START")
6501 998 errorsexits(
"DISTRIBUTED_VECTOR_CREATE_START",err,error)
6514 INTEGER(INTG),
INTENT(OUT) :: dataType
6515 INTEGER(INTG),
INTENT(OUT) :: err
6518 enters(
"DistributedVector_DataTypeGet",err,error,*999)
6520 IF(
ASSOCIATED(vector))
THEN 6521 IF(.NOT.vector%vector_finished)
THEN 6522 CALL flagerror(
"The vector has not been finished.",err,error,*999)
6524 datatype=vector%data_type
6527 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
6530 exits(
"DistributedVector_DataTypeGet")
6532 999 errorsexits(
"DistributedVector_DataTypeGet",err,error)
6545 INTEGER(INTG),
INTENT(IN) :: DATA_TYPE
6546 INTEGER(INTG),
INTENT(OUT) :: ERR
6551 enters(
"DISTRIBUTED_VECTOR_DATA_TYPE_SET",err,error,*999)
6553 IF(
ASSOCIATED(distributed_vector))
THEN 6554 IF(distributed_vector%VECTOR_FINISHED)
THEN 6555 CALL flagerror(
"The distributed vector has been finished.",err,error,*999)
6557 SELECT CASE(distributed_vector%LIBRARY_TYPE)
6559 SELECT CASE(data_type)
6569 local_error=
"The distributed data type of "//
trim(
numbertovstring(data_type,
"*",err,error))//
" is invalid." 6570 CALL flagerror(local_error,err,error,*999)
6573 SELECT CASE(data_type)
6575 CALL flagerror(
"An integer distributed PETSc vector is not implemented.",err,error,*999)
6577 CALL flagerror(
"A single precision distributed PETSc vector is not implemented.",err,error,*999)
6581 CALL flagerror(
"A logical distributed PETSc vector is not implemented.",err,error,*999)
6583 local_error=
"The distributed data type of "//
trim(
numbertovstring(data_type,
"*",err,error))//
" is invalid." 6584 CALL flagerror(local_error,err,error,*999)
6587 local_error=
"The distributed vector library type of "// &
6589 CALL flagerror(local_error,err,error,*999)
6593 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
6596 exits(
"DISTRIBUTED_VECTOR_DATA_TYPE_SET")
6598 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_TYPE_SET",err,error)
6611 INTEGER(INTG),
INTENT(OUT) :: ERR
6615 enters(
"DISTRIBUTED_VECTOR_DESTROY",err,error,*999)
6617 IF(
ASSOCIATED(distributed_vector))
THEN 6620 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
6623 exits(
"DISTRIBUTED_VECTOR_DESTROY")
6625 999 errorsexits(
"DISTRIBUTED_VECTOR_DESTROY",err,error)
6639 INTEGER(INTG),
INTENT(OUT) :: ERR
6642 INTEGER(INTG) :: DUMMY_ERR
6645 enters(
"DISTRIBUTED_VECTOR_DUPLICATE",err,error,*998)
6647 IF(
ASSOCIATED(distributed_vector))
THEN 6648 IF(
ASSOCIATED(new_distributed_vector))
THEN 6649 CALL flagerror(
"New distributed vector is already associated.",err,error,*998)
6657 CALL flagerror(
"Distributed vector is not associated.",err,error,*998)
6660 exits(
"DISTRIBUTED_VECTOR_DUPLICATE")
6663 998 errorsexits(
"DISTRIBUTED_VECTOR_DUPLICATE",err,error)
6676 INTEGER(INTG),
INTENT(OUT) :: ERR
6680 enters(
"DISTRIBUTED_VECTOR_FINALISE",err,error,*999)
6682 IF(
ASSOCIATED(distributed_vector))
THEN 6685 DEALLOCATE(distributed_vector)
6688 exits(
"DISTRIBUTED_VECTOR_FINALISE")
6690 999 errorsexits(
"DISTRIBUTED_VECTOR_FINALISE",err,error)
6704 INTEGER(INTG),
INTENT(OUT) :: ERR
6707 INTEGER(INTG) :: DUMMY_ERR
6710 enters(
"DISTRIBUTED_VECTOR_INITIALISE",err,error,*998)
6712 IF(
ASSOCIATED(domain_mapping))
THEN 6713 IF(
ASSOCIATED(distributed_vector))
THEN 6714 CALL flagerror(
"Distributed vector is already associated.",err,error,*998)
6716 ALLOCATE(distributed_vector,stat=err)
6717 IF(err/=0)
CALL flagerror(
"Could not allocated the distributed vector.",err,error,*999)
6718 distributed_vector%VECTOR_FINISHED=.false.
6719 distributed_vector%LIBRARY_TYPE=0
6721 distributed_vector%DOMAIN_MAPPING=>domain_mapping
6723 NULLIFY(distributed_vector%CMISS)
6724 NULLIFY(distributed_vector%PETSC)
6728 CALL flagerror(
"Domain mapping is not associated.",err,error,*998)
6731 exits(
"DISTRIBUTED_VECTOR_INITIALISE")
6734 998 errorsexits(
"DISTRIBUTED_VECTOR_INITIALISE",err,error)
6747 INTEGER(INTG),
POINTER :: DATA(:)
6748 INTEGER(INTG),
INTENT(OUT) :: ERR
6753 enters(
"DISTRIBUTED_VECTOR_DATA_GET_INTG",err,error,*999)
6755 IF(
ASSOCIATED(distributed_vector))
THEN 6756 IF(
ASSOCIATED(data))
THEN 6757 CALL flagerror(
"Data is already associated.",err,error,*999)
6760 IF(distributed_vector%VECTOR_FINISHED)
THEN 6762 SELECT CASE(distributed_vector%LIBRARY_TYPE)
6764 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 6765 data=>distributed_vector%CMISS%DATA_INTG
6767 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
6770 CALL flagerror(
"Cannot get data for an integer PETSc distributed vector.",err,error,*999)
6772 local_error=
"The distributed vector library type of "// &
6774 CALL flagerror(local_error,err,error,*999)
6777 local_error=
"The distributed data type of "// &
6779 &
" does not correspond to the integer data type of the requested values." 6780 CALL flagerror(local_error,err,error,*999)
6783 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
6787 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
6790 exits(
"DISTRIBUTED_VECTOR_DATA_GET_INTG")
6792 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_GET_INTG",err,error)
6805 REAL(SP),
POINTER :: DATA(:)
6806 INTEGER(INTG),
INTENT(OUT) :: ERR
6811 enters(
"DISTRIBUTED_VECTOR_DATA_GET_SP",err,error,*999)
6813 IF(
ASSOCIATED(distributed_vector))
THEN 6814 IF(
ASSOCIATED(data))
THEN 6815 CALL flagerror(
"Data is already associated.",err,error,*999)
6818 IF(distributed_vector%VECTOR_FINISHED)
THEN 6820 SELECT CASE(distributed_vector%LIBRARY_TYPE)
6822 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 6823 data=>distributed_vector%CMISS%DATA_SP
6825 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
6828 CALL flagerror(
"Cannot get values for a single precision PETSc distributed vector.",err,error,*999)
6830 local_error=
"The distributed vector library type of "// &
6832 CALL flagerror(local_error,err,error,*999)
6835 local_error=
"The distributed data type of "// &
6837 &
" does not correspond to the single precision data type of the requested values." 6838 CALL flagerror(local_error,err,error,*999)
6841 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
6845 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
6848 exits(
"DISTRIBUTED_VECTOR_DATA_GET_SP")
6850 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_GET_SP",err,error)
6863 REAL(DP),
POINTER :: DATA(:)
6864 INTEGER(INTG),
INTENT(OUT) :: ERR
6869 enters(
"DISTRIBUTED_VECTOR_DATA_GET_DP",err,error,*999)
6871 IF(
ASSOCIATED(distributed_vector))
THEN 6872 IF(
ASSOCIATED(data))
THEN 6873 CALL flagerror(
"Data is already associated.",err,error,*999)
6876 IF(distributed_vector%VECTOR_FINISHED)
THEN 6878 SELECT CASE(distributed_vector%LIBRARY_TYPE)
6880 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 6881 data=>distributed_vector%CMISS%DATA_DP
6883 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
6886 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 6887 IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 6893 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
6896 local_error=
"The distributed vector library type of "// &
6898 CALL flagerror(local_error,err,error,*999)
6901 local_error=
"The distributed data type of "// &
6903 &
" does not correspond to the double precision data type of the requested values." 6904 CALL flagerror(local_error,err,error,*999)
6907 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
6911 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
6914 exits(
"DISTRIBUTED_VECTOR_DATA_GET_DP")
6916 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_GET_DP",err,error)
6929 LOGICAL,
POINTER :: DATA(:)
6930 INTEGER(INTG),
INTENT(OUT) :: ERR
6935 enters(
"DISTRIBUTED_VECTOR_DATA_GET_L",err,error,*999)
6937 IF(
ASSOCIATED(distributed_vector))
THEN 6938 IF(
ASSOCIATED(data))
THEN 6939 CALL flagerror(
"Data is already associated.",err,error,*999)
6942 IF(distributed_vector%VECTOR_FINISHED)
THEN 6944 SELECT CASE(distributed_vector%LIBRARY_TYPE)
6946 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 6947 data=>distributed_vector%CMISS%DATA_L
6949 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
6952 CALL flagerror(
"Cannot get values for a logical PETSc distributed vector.",err,error,*999)
6954 local_error=
"The distributed vector library type of "// &
6956 CALL flagerror(local_error,err,error,*999)
6959 local_error=
"The distributed data type of "//
trim(
numbertovstring(distributed_vector%DATA_TYPE,
"*",err,error))// &
6960 &
" does not correspond to the logical data type of the requested values." 6961 CALL flagerror(local_error,err,error,*999)
6964 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
6968 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
6971 exits(
"DISTRIBUTED_VECTOR_DATA_GET_L")
6973 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_GET_L",err,error)
6986 INTEGER(INTG),
POINTER :: DATA(:)
6987 INTEGER(INTG),
INTENT(OUT) :: ERR
6992 enters(
"DISTRIBUTED_VECTOR_DATA_RESTORE_INTG",err,error,*999)
6994 IF(
ASSOCIATED(distributed_vector))
THEN 6995 IF(
ASSOCIATED(data))
THEN 6996 IF(distributed_vector%VECTOR_FINISHED)
THEN 6997 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7001 CALL flagerror(
"Cannot restore data for an integer PETSc distributed vector.",err,error,*999)
7003 local_error=
"The distributed vector library type of "// &
7005 CALL flagerror(local_error,err,error,*999)
7008 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
7011 CALL flagerror(
"Data is not associated.",err,error,*999)
7014 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7017 exits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_INTG")
7019 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_INTG",err,error)
7032 REAL(SP),
POINTER :: DATA(:)
7033 INTEGER(INTG),
INTENT(OUT) :: ERR
7038 enters(
"DISTRIBUTED_VECTOR_DATA_RESTORE_SP",err,error,*999)
7040 IF(
ASSOCIATED(distributed_vector))
THEN 7041 IF(
ASSOCIATED(data))
THEN 7042 IF(distributed_vector%VECTOR_FINISHED)
THEN 7043 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7047 CALL flagerror(
"Cannot restore data for a single precision PETSc distributed vector.",err,error,*999)
7049 local_error=
"The distributed vector library type of "// &
7051 CALL flagerror(local_error,err,error,*999)
7054 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
7057 CALL flagerror(
"Data is not associated.",err,error,*999)
7060 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7063 exits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_SP")
7065 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_SP",err,error)
7078 REAL(DP),
POINTER :: DATA(:)
7079 INTEGER(INTG),
INTENT(OUT) :: ERR
7084 enters(
"DISTRIBUTED_VECTOR_DATA_RESTORE_DP",err,error,*999)
7086 IF(
ASSOCIATED(distributed_vector))
THEN 7087 IF(
ASSOCIATED(data))
THEN 7088 IF(distributed_vector%VECTOR_FINISHED)
THEN 7089 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7093 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 7094 IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 7100 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
7103 local_error=
"The distributed vector library type of "// &
7105 CALL flagerror(local_error,err,error,*999)
7108 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
7111 CALL flagerror(
"Data is not associated.",err,error,*999)
7114 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7117 exits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_DP")
7119 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_DP",err,error)
7132 LOGICAL,
POINTER :: DATA(:)
7133 INTEGER(INTG),
INTENT(OUT) :: ERR
7138 enters(
"DISTRIBUTED_VECTOR_DATA_RESTORE_L",err,error,*999)
7140 IF(
ASSOCIATED(distributed_vector))
THEN 7141 IF(
ASSOCIATED(data))
THEN 7142 IF(distributed_vector%VECTOR_FINISHED)
THEN 7143 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7147 CALL flagerror(
"Cannot restore data for a logical PETSc distributed vector.",err,error,*999)
7149 local_error=
"The distributed matrix library type of "// &
7151 CALL flagerror(local_error,err,error,*999)
7154 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
7157 CALL flagerror(
"Data is not associated.",err,error,*999)
7160 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7163 exits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_L")
7165 999 errorsexits(
"DISTRIBUTED_VECTOR_DATA_RESTORE_L",err,error)
7178 INTEGER(INTG),
INTENT(IN) :: GHOSTING_TYPE
7179 INTEGER(INTG),
INTENT(OUT) :: ERR
7184 enters(
"DISTRIBUTED_VECTOR_GHOSTING_TYPE_SET",err,error,*999)
7186 IF(
ASSOCIATED(distributed_vector))
THEN 7187 IF(distributed_vector%VECTOR_FINISHED)
THEN 7188 CALL flagerror(
"The distributed vector has already been finished.",err,error,*999)
7190 IF(
ASSOCIATED(distributed_vector%DOMAIN_MAPPING))
THEN 7191 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7193 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 7194 SELECT CASE(ghosting_type)
7196 distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
7198 distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
7200 local_error=
"The given ghosting type of "//
trim(
numbertovstring(ghosting_type,
"*",err,error))//
" is invalid." 7201 CALL flagerror(local_error,err,error,*999)
7204 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
7207 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 7208 SELECT CASE(ghosting_type)
7210 distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
7212 distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
7214 local_error=
"The given ghosting type of "//
trim(
numbertovstring(ghosting_type,
"*",err,error))//
" is invalid." 7215 CALL flagerror(local_error,err,error,*999)
7218 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
7221 local_error=
"The distributed vector library type of "// &
7223 CALL flagerror(local_error,err,error,*999)
7225 distributed_vector%GHOSTING_TYPE=ghosting_type
7227 CALL flagerror(
"Distributed vector domain mapping is not associated.",err,error,*999)
7231 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7234 exits(
"DISTRIBUTED_VECTOR_GHOSTING_TYPE_SET")
7236 999 errorsexits(
"DISTRIBUTED_VECTOR_GHOSTING_TYPE_SET",err,error)
7249 INTEGER(INTG),
INTENT(IN) :: LIBRARY_TYPE
7250 INTEGER(INTG),
INTENT(OUT) :: ERR
7253 INTEGER(INTG) :: DUMMY_ERR,OLD_LIBRARY_TYPE
7256 enters(
"DISTRIBUTED_VECTOR_LIBRARY_TYPE_SET",err,error,*998)
7258 IF(
ASSOCIATED(distributed_vector))
THEN 7259 IF(distributed_vector%VECTOR_FINISHED)
THEN 7260 CALL flagerror(
"The distributed vector has already been finished.",err,error,*998)
7262 old_library_type=distributed_vector%LIBRARY_TYPE
7263 IF(library_type/=old_library_type)
THEN 7265 SELECT CASE(library_type)
7271 local_error=
"The distributed vector library type of "//
trim(
numbertovstring(library_type,
"*",err,error))// &
7273 CALL flagerror(local_error,err,error,*999)
7276 SELECT CASE(old_library_type)
7282 local_error=
"The distributed vector library type of "// &
7284 CALL flagerror(local_error,err,error,*999)
7286 distributed_vector%LIBRARY_TYPE=library_type
7290 CALL flagerror(
"Distributed vector is not associated.",err,error,*998)
7293 exits(
"DISTRIBUTED_VECTOR_LIBRARY_TYPE_SET")
7295 999
SELECT CASE(library_type)
7301 998 errorsexits(
"DISTRIBUTED_VECTOR_LIBRARY_TYPE_SET",err,error)
7313 INTEGER(INTG),
INTENT(IN) :: ID
7315 INTEGER(INTG),
INTENT(OUT) :: ERR
7318 REAL(DP),
POINTER :: VECTOR(:)
7321 enters(
"DISTRIBUTED_VECTOR_OUTPUT",err,error,*999)
7323 IF(
ASSOCIATED(distributed_vector))
THEN 7324 IF(distributed_vector%VECTOR_FINISHED)
THEN 7325 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7327 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 7328 SELECT CASE(distributed_vector%DATA_TYPE)
7330 CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_INTG, &
7331 &
'("Vector(:) :",8(X,I13))',
'(20X,8(X,I13))',err,error,*999)
7333 CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_SP, &
7334 &
'("Vector(:) :",8(X,E13.6))',
'(20X,8(X,E13.6))',err,error,*999)
7336 CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_DP, &
7337 &
'("Vector(:) :",8(X,E13.6))',
'(20X,8(X,E13.6))',err,error,*999)
7339 CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_L, &
7340 &
'("Vector(:) :",8(X,L13))',
'(20X,8(X,L13))',err,error,*999)
7342 local_error=
"The distributed vector data type of "// &
7344 CALL flagerror(local_error,err,error,*999)
7347 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
7350 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 7352 IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 7358 &
'("Vector(:) :",8(X,E13.6))',
'(20X,8(X,E13.6))',err,error,*999)
7359 IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 7365 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
7368 local_error=
"The distributed vector library type of "// &
7370 CALL flagerror(local_error,err,error,*999)
7373 CALL flagerror(
"Distributed vector has not been finished.",err,error,*999)
7376 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7379 exits(
"DISTRIBUTED_VECTOR_OUTPUT")
7381 999 errorsexits(
"DISTRIBUTED_VECTOR_OUTPUT",err,error)
7394 TYPE(petscvectype),
INTENT(IN) :: OVERRIDE_VECTOR
7395 INTEGER(INTG),
INTENT(OUT) :: ERR
7400 enters(
"DISTRIBUTED_VECTOR_OVERRIDE_SET_ON",err,error,*999)
7402 IF(
ASSOCIATED(distributed_vector))
THEN 7403 IF(distributed_vector%VECTOR_FINISHED)
THEN 7404 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7406 CALL flagerror(
"Not implemented.",err,error,*999)
7408 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 7409 distributed_vector%PETSC%USE_OVERRIDE_VECTOR=.true.
7410 distributed_vector%PETSC%OVERRIDE_VECTOR=override_vector
7412 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
7415 local_error=
"The distributed vector library type of "// &
7417 CALL flagerror(local_error,err,error,*999)
7420 CALL flagerror(
"Distributed vector has not been finished.",err,error,*999)
7423 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7426 exits(
"DISTRIBUTED_VECTOR_OVERRIDE_SET_ON")
7428 999 errorsexits(
"DISTRIBUTED_VECTOR_OVERRIDE_SET_ON",err,error)
7441 INTEGER(INTG),
INTENT(OUT) :: ERR
7446 enters(
"DISTRIBUTED_VECTOR_OVERRIDE_SET_OFF",err,error,*999)
7448 IF(
ASSOCIATED(distributed_vector))
THEN 7449 IF(distributed_vector%VECTOR_FINISHED)
THEN 7450 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7452 CALL flagerror(
"Not implemented.",err,error,*999)
7454 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 7455 distributed_vector%PETSC%USE_OVERRIDE_VECTOR=.false.
7458 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
7461 local_error=
"The distributed vector library type of "// &
7463 CALL flagerror(local_error,err,error,*999)
7466 CALL flagerror(
"Distributed vector has not been finished.",err,error,*999)
7469 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7472 exits(
"DISTRIBUTED_VECTOR_OVERRIDE_SET_OFF")
7474 999 errorsexits(
"DISTRIBUTED_VECTOR_OVERRIDE_SET_OFF",err,error)
7487 INTEGER(INTG),
INTENT(OUT) :: ERR
7490 INTEGER(INTG) :: DUMMY_ERR,i
7491 INTEGER(INTG),
ALLOCATABLE :: GLOBAL_NUMBERS(:)
7496 enters(
"DISTRIBUTED_VECTOR_PETSC_CREATE_FINISH",err,error,*998)
7498 IF(
ASSOCIATED(petsc_vector))
THEN 7499 distributed_vector=>petsc_vector%DISTRIBUTED_VECTOR
7500 IF(
ASSOCIATED(distributed_vector))
THEN 7501 domain_mapping=>distributed_vector%DOMAIN_MAPPING
7502 IF(
ASSOCIATED(domain_mapping))
THEN 7504 petsc_vector%DATA_SIZE=petsc_vector%N
7508 DO i=1,petsc_vector%N
7509 petsc_vector%GLOBAL_NUMBERS(i)=domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
7512 CALL flagerror(
"PETSc vector distributed vector domain mapping is not associated.",err,error,*999)
7516 CALL flagerror(
"PETSc vector is not associated.",err,error,*998)
7519 exits(
"DISTRIBUTED_VECTOR_PETSC_CREATE_FINISH")
7521 999
IF(
ALLOCATED(global_numbers))
DEALLOCATE(global_numbers)
7523 998 errorsexits(
"DISTRIBUTED_VECTOR_PETSC_CREATE_FINISH",err,error)
7536 INTEGER(INTG),
INTENT(OUT) :: ERR
7540 enters(
"DISTRIBUTED_VECTOR_PETSC_FINALISE",err,error,*999)
7542 IF(
ASSOCIATED(petsc_vector))
THEN 7543 IF(
ALLOCATED(petsc_vector%GLOBAL_NUMBERS))
DEALLOCATE(petsc_vector%GLOBAL_NUMBERS)
7546 DEALLOCATE(petsc_vector)
7549 exits(
"DISTRIBUTED_VECTOR_PETSC_FINALSE")
7551 999 errorsexits(
"DISTRIBUTED_VECTOR_PETSC_FINALISE",err,error)
7564 INTEGER(INTG),
INTENT(OUT) :: ERR
7567 INTEGER(INTG) :: DUMMY_ERR
7570 enters(
"DISTRIBUTED_VECTOR_PETSC_INITIALISE",err,error,*998)
7572 IF(
ASSOCIATED(distributed_vector))
THEN 7573 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 7574 CALL flagerror(
"PETSc is already associated for this distributed vector.",err,error,*998)
7576 IF(
ASSOCIATED(distributed_vector%DOMAIN_MAPPING))
THEN 7577 ALLOCATE(distributed_vector%PETSC,stat=err)
7578 IF(err/=0)
CALL flagerror(
"Could not allocate PETSc distributed vector.",err,error,*999)
7579 distributed_vector%PETSC%DISTRIBUTED_VECTOR=>distributed_vector
7582 SELECT CASE(distributed_vector%GHOSTING_TYPE)
7584 distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
7586 distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
7588 local_error=
"The distributed vector ghosting type of "// &
7590 CALL flagerror(local_error,err,error,*999)
7592 distributed_vector%PETSC%GLOBAL_N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_GLOBAL
7593 ALLOCATE(distributed_vector%PETSC%GLOBAL_NUMBERS(distributed_vector%PETSC%N),stat=err)
7594 IF(err/=0)
CALL flagerror(
"Could not allocate PETSc distributed vector global numbers.",err,error,*999)
7595 distributed_vector%PETSC%USE_OVERRIDE_VECTOR=.false.
7599 CALL flagerror(
"Distributed vector domain mapping is not associated",err,error,*998)
7603 CALL flagerror(
"Distributed vector is not associated",err,error,*998)
7606 exits(
"DISTRIBUTED_VECTOR_PETSC_INITIALSE")
7608 999
IF(
ASSOCIATED(distributed_vector%PETSC)) &
7610 998 errorsexits(
"DISTRIBUTED_VECTOR_PETSC_INITIALISE",err,error)
7623 INTEGER(INTG),
INTENT(IN) :: domain_idx
7624 INTEGER(INTG),
INTENT(OUT) :: ERR
7629 enters(
"DISTRIBUTED_VECTOR_CMISS_TRANSFER_FINALISE",err,error,*999)
7631 IF(
ASSOCIATED(cmiss_vector))
THEN 7632 IF(
ALLOCATED(cmiss_vector%TRANSFERS))
THEN 7633 IF(domain_idx>0.AND.domain_idx<=
SIZE(cmiss_vector%TRANSFERS,1))
THEN 7634 NULLIFY(cmiss_vector%TRANSFERS(domain_idx)%CMISS_VECTOR)
7635 cmiss_vector%TRANSFERS(domain_idx)%DATA_TYPE=0
7636 cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER=-1
7637 cmiss_vector%TRANSFERS(domain_idx)%SEND_TAG_NUMBER=-1
7638 cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE=0
7639 cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE=0
7640 cmiss_vector%TRANSFERS(domain_idx)%MPI_SEND_REQUEST=mpi_request_null
7641 cmiss_vector%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST=mpi_request_null
7642 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_INTG)) &
7643 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_INTG)
7644 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SP)) &
7645 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SP)
7646 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_DP)) &
7647 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_DP)
7648 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_L)) &
7649 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_L)
7650 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG)) &
7651 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG)
7652 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP)) &
7653 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP)
7654 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP)) &
7655 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP)
7656 IF(
ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L)) &
7657 &
DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L)
7660 &
" is invalid. It must be between 1 and "//
trim(
numbertovstring(
SIZE(cmiss_vector%TRANSFERS,1),
"*",err,error))//
"." 7661 CALL flagerror(local_error,err,error,*999)
7666 exits(
"DISTRIBUTED_VECTOR_CMISS_TRANSFER_FINALISE")
7668 999 errorsexits(
"DISTRIBUTED_VECTOR_CMISS_TRANSFER_FINALISE",err,error)
7681 INTEGER(INTG),
INTENT(IN) :: domain_idx
7682 INTEGER(INTG),
INTENT(OUT) :: ERR
7687 enters(
"DistributedVector_CmissTransferInitialise",err,error,*999)
7689 IF(
ASSOCIATED(cmiss_vector))
THEN 7690 IF(
ALLOCATED(cmiss_vector%TRANSFERS))
THEN 7691 IF(domain_idx>0.AND.domain_idx<=
SIZE(cmiss_vector%TRANSFERS,1))
THEN 7692 cmiss_vector%TRANSFERS(domain_idx)%CMISS_VECTOR=>cmiss_vector
7693 cmiss_vector%TRANSFERS(domain_idx)%DATA_TYPE=0
7694 cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE=0
7695 cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE=0
7696 cmiss_vector%TRANSFERS(domain_idx)%SEND_TAG_NUMBER=-1
7697 cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER=-1
7698 cmiss_vector%TRANSFERS(domain_idx)%MPI_SEND_REQUEST=mpi_request_null
7699 cmiss_vector%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST=mpi_request_null
7702 &
" is invalid. It must be between 1 and "// &
7704 CALL flagerror(local_error,err,error,*999)
7707 CALL flagerror(
"CMISS vector transfers is not allocated.",err,error,*999)
7710 CALL flagerror(
"CMISS vector is not associated.",err,error,*999)
7713 exits(
"DistributedVector_CmissTransferInitialise")
7715 999 errorsexits(
"DistributedVector_CmissTransferInitialise",err,error)
7728 INTEGER(INTG),
INTENT(OUT) :: ERR
7731 INTEGER(INTG) :: domain_idx,i,NUMBER_OF_COMPUTATIONAL_NODES
7734 enters(
"DISTRIBUTED_VECTOR_UPDATE_FINISH",err,error,*999)
7736 IF(
ASSOCIATED(distributed_vector))
THEN 7737 IF(distributed_vector%VECTOR_FINISHED)
THEN 7738 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7740 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 7741 IF(
ASSOCIATED(distributed_vector%DOMAIN_MAPPING))
THEN 7744 IF(number_of_computational_nodes>1)
THEN 7747 DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
7748 SELECT CASE(distributed_vector%DATA_TYPE)
7750 DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7751 distributed_vector%CMISS%DATA_INTG(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7752 & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG(i)
7755 DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7756 distributed_vector%CMISS%DATA_SP(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7757 & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP(i)
7760 DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7761 distributed_vector%CMISS%DATA_DP(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7762 & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP(i)
7765 DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7766 distributed_vector%CMISS%DATA_L(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7767 & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L(i)
7770 local_error=
"The distributed vector data type of "// &
7772 CALL flagerror(local_error,err,error,*999)
7777 CALL flagerror(
"Distributed vector domain mapping is not associated.",err,error,*999)
7780 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
7783 IF(
ASSOCIATED(distributed_vector%PETSC))
THEN 7784 IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR)
THEN 7790 CALL flagerror(
"Distributed vector PETSc is not associated.",err,error,*999)
7793 local_error=
"The distributed vector library type of "// &
7795 CALL flagerror(local_error,err,error,*999)
7798 CALL flagerror(
"The distributed vector has not been finished.",err,error,*999)
7801 CALL flagerror(
"Distributed vector is not associated.",err,error,*999)
7805 SELECT CASE(distributed_vector%LIBRARY_TYPE)
7807 IF(
ASSOCIATED(distributed_vector%CMISS))
THEN 7813 & number_of_adjacent_domains,err,error,*999)
7814 DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
7817 & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
7819 & transfers(domain_idx)%RECEIVE_TAG_NUMBER,err,error,*999)
7821 & transfers(domain_idx)%SEND_TAG_NUMBER,err,error,*999)
7823 & transfers(domain_idx)%MPI_SEND_REQUEST,err,error,*999)
7825 & transfers(domain_idx)%MPI_RECEIVE_REQUEST,err,error,*999)
7828 SELECT CASE(distributed_vector%DATA_TYPE)
7831 & data_intg,
'(" Data :",5(X,I13))',
'(8X,5(X,I13))',err,error,*999)
7834 & data_sp,
'(" Data :",5(X,E13.6))',
'(8X,5(X,E13.6))',err,error,*999)
7837 & data_dp,
'(" Data :",5(X,E13.6))',
'(8X,5(X,E13.6))',err,error,*999)
7840 & data_l,
'(" Data :",8(X,L))',
'(8X,8(X,L))',err,error,*999)
7842 local_error=
"The distributed vector data type of "// &
7844 CALL flagerror(local_error,err,error,*999)
7847 CALL flagerror(
"Distributed vector CMISS is not associated.",err,error,*999)
7852 local_error=
"The distributed vector library type of "// &
7854 CALL flagerror(local_error,err,error,*999)
7858 exits(
"DISTRIBUTED_VECTOR_UPDATE_FINISH")
7860 999 errorsexits(
"DISTRIBUTED_VECTOR_UPDATE_FINISH",err,error)
  !DISTRIBUTED_VECTOR_UPDATE_ISFINISHED: tests whether the (non-blocking) ghost update of a distributed vector has completed.
    LOGICAL, INTENT(OUT) :: ISFINISHED
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: domain_idx
    INTEGER(INTG) :: MPI_IERROR,STATUS(mpi_status_size)

    enters("DISTRIBUTED_VECTOR_UPDATE_ISFINISHED",err,error,*999)

    isfinished=.FALSE.
    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        SELECT CASE(distributed_vector%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_vector%CMISS)) THEN
            IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
              DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
                CALL mpi_test(distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,isfinished,status,mpi_ierror)
                ! ...
                IF(.NOT.isfinished) EXIT
              ENDDO !domain_idx
            ELSE
              CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot test if the update is finished for a PETSc distributed vector.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed vector library type of "// &
            & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_UPDATE_ISFINISHED")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_UPDATE_ISFINISHED",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_UPDATE_WAITFINISHED: waits until the (non-blocking) ghost update of a distributed vector has completed.
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: domain_idx
    INTEGER(INTG) :: MPI_IERROR,STATUS(mpi_status_size)

    enters("DISTRIBUTED_VECTOR_UPDATE_WAITFINISHED",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        SELECT CASE(distributed_vector%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_vector%CMISS)) THEN
            IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
              DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
                CALL mpi_wait(distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,status,mpi_ierror)
                ! ...
              ENDDO !domain_idx
            ELSE
              CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot wait for the update to finish for a PETSc distributed vector.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed vector library type of "// &
            & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_UPDATE_WAITFINISHED")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_UPDATE_WAITFINISHED",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_UPDATE_START: starts the (non-blocking) update of the ghost values of a distributed vector.
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: domain_idx,i,MPI_IERROR,NUMBER_OF_COMPUTATIONAL_NODES

    enters("DISTRIBUTED_VECTOR_UPDATE_START",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        SELECT CASE(distributed_vector%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          IF(ASSOCIATED(distributed_vector%CMISS)) THEN
            IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
              ! ... (get NUMBER_OF_COMPUTATIONAL_NODES)
              IF(number_of_computational_nodes>1) THEN
                IF(distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS>0) THEN
                  !Fill the send buffers with the owned values destined for each adjacent domain.
                  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
                    SELECT CASE(distributed_vector%DATA_TYPE)
                    CASE(MATRIX_VECTOR_INTG_TYPE)
                      DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_SEND_GHOSTS
                        distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_INTG(i)= &
                          & distributed_vector%CMISS%DATA_INTG(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
                          & local_ghost_send_indices(i))
                      ENDDO !i
                    CASE(MATRIX_VECTOR_SP_TYPE)
                      ! ... (as above, with SEND_BUFFER_SP and DATA_SP)
                    CASE(MATRIX_VECTOR_DP_TYPE)
                      DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_SEND_GHOSTS
                        distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_DP(i)= &
                          & distributed_vector%CMISS%DATA_DP(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
                          & local_ghost_send_indices(i))
                      ENDDO !i
                    CASE(MATRIX_VECTOR_L_TYPE)
                      ! ... (as above, with SEND_BUFFER_L and DATA_L)
                    CASE DEFAULT
                      local_error="The distributed vector data type of "// &
                        & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
                      CALL flagerror(local_error,err,error,*999)
                    END SELECT
                  ENDDO !domain_idx
                  !Post the non-blocking receives for the ghost values coming from each adjacent domain.
                  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
                    SELECT CASE(distributed_vector%DATA_TYPE)
                    CASE(MATRIX_VECTOR_INTG_TYPE)
                      CALL mpi_irecv(distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,mpi_integer, &
                        & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER, ... , &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,mpi_ierror)
                      ! ... (check MPI_IERROR and, if diagnostics are on, write the receive buffer size, the adjacent
                      !      domain number, the receive tag number and the MPI receive request handle)
                    CASE(MATRIX_VECTOR_SP_TYPE)
                      ! ... (as above, with RECEIVE_BUFFER_SP and mpi_real)
                    CASE(MATRIX_VECTOR_DP_TYPE)
                      CALL mpi_irecv(distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,mpi_double_precision, &
                        & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER, ... , &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,mpi_ierror)
                    CASE(MATRIX_VECTOR_L_TYPE)
                      ! ... (as above, with RECEIVE_BUFFER_L and mpi_logical)
                    CASE DEFAULT
                      local_error="The distributed vector data type of "// &
                        & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
                      CALL flagerror(local_error,err,error,*999)
                    END SELECT
                  ENDDO !domain_idx
                  !Post the non-blocking sends of the buffered ghost values to each adjacent domain.
                  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
                    SELECT CASE(distributed_vector%DATA_TYPE)
                    CASE(MATRIX_VECTOR_INTG_TYPE)
                      CALL mpi_isend(distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_INTG, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,mpi_integer, &
                        & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_TAG_NUMBER, ... , &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,mpi_ierror)
                      ! ... (check MPI_IERROR and, if diagnostics are on, write the send buffer size, the adjacent
                      !      domain number, the send tag number and the MPI send request handle)
                    CASE(MATRIX_VECTOR_SP_TYPE)
                      ! ... (as above, with SEND_BUFFER_SP and mpi_real)
                    CASE(MATRIX_VECTOR_DP_TYPE)
                      CALL mpi_isend(distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_DP, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,mpi_double_precision, &
                        & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_TAG_NUMBER, ... , &
                        & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,mpi_ierror)
                    CASE(MATRIX_VECTOR_L_TYPE)
                      ! ... (as above, with SEND_BUFFER_L and mpi_logical)
                    CASE DEFAULT
                      local_error="The distributed vector data type of "// &
                        & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
                      CALL flagerror(local_error,err,error,*999)
                    END SELECT
                  ENDDO !domain_idx
                ENDIF
              ENDIF
            ELSE
              CALL flagerror("Domain mapping is not associated for the distributed vector.",err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          IF(ASSOCIATED(distributed_vector%PETSC)) THEN
            IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
              ! ... (begin the assembly of the PETSc override vector)
            ELSE
              ! ... (begin the assembly of the PETSc vector)
            ENDIF
          ELSE
            CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed vector library type of "// &
            & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    !Diagnostics: output the adjacent domain numbers, transfer tag numbers, MPI request handles and the vector
    !data, using the same structure and formats as in DISTRIBUTED_VECTOR_UPDATE_FINISH above.
    ! ...

    exits("DISTRIBUTED_VECTOR_UPDATE_START")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_UPDATE_START",err,error)
    RETURN 1
  !DistributedVector_L2Norm: calculates the L2 norm of a double-precision distributed vector on this computational node.
    REAL(DP), INTENT(OUT) :: norm
    INTEGER(INTG), INTENT(OUT) :: err

    enters("DistributedVector_L2Norm",err,error,*999)

    IF(ASSOCIATED(distributedvector)) THEN
      IF(distributedvector%VECTOR_FINISHED) THEN
        SELECT CASE(distributedvector%LIBRARY_TYPE)
        CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
          SELECT CASE(distributedvector%DATA_TYPE)
          CASE(MATRIX_VECTOR_DP_TYPE)
            IF(ASSOCIATED(distributedvector%CMISS)) THEN
              norm=0.0_DP
              DO i=1,distributedvector%CMISS%DATA_SIZE
                norm=norm+(distributedvector%CMISS%DATA_DP(i)**2)
              ENDDO !i
              norm=SQRT(norm)
            ELSE
              CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(MATRIX_VECTOR_INTG_TYPE)
            CALL flagerror("Not implemented.",err,error,*999)
          CASE(MATRIX_VECTOR_SP_TYPE)
            CALL flagerror("Not implemented.",err,error,*999)
          CASE(MATRIX_VECTOR_L_TYPE)
            CALL flagerror("Not implemented.",err,error,*999)
          CASE DEFAULT
            localerror="The distributed data type of "// &
              & trim(numbertovstring(distributedvector%DATA_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(localerror,err,error,*999)
          END SELECT
        CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
          CALL flagerror("Cannot calculate norm for a PETSc distributed vector.",err,error,*999)
        CASE DEFAULT
          localerror="The distributed vector library type of "// &
            & trim(numbertovstring(distributedvector%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(localerror,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DistributedVector_L2Norm")
    RETURN
999 errorsexits("DistributedVector_L2Norm",err,error)
    RETURN 1
  !DistributedVector_VecDotIntg: calculates the dot product of two distributed integer vectors on this computational node.
    INTEGER(INTG), INTENT(OUT) :: dotProduct
    INTEGER(INTG), INTENT(OUT) :: err
    INTEGER(INTG) :: dataTypeA,dataTypeB,i

    enters("DistributedVector_VecDotIntg",err,error,*999)

    IF(ASSOCIATED(distributedvectora).AND.ASSOCIATED(distributedvectorb)) THEN
      IF(distributedvectora%VECTOR_FINISHED.AND.distributedvectorb%VECTOR_FINISHED) THEN
        IF(distributedvectora%LIBRARY_TYPE==distributedvectorb%LIBRARY_TYPE) THEN
          ! ... (get dataTypeA and dataTypeB from the two vectors)
          IF(datatypea==datatypeb) THEN
            SELECT CASE(distributedvectora%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributedvectora%CMISS)) THEN
                IF(distributedvectora%CMISS%DATA_SIZE==distributedvectorb%CMISS%DATA_SIZE) THEN
                  IF(datatypea==MATRIX_VECTOR_INTG_TYPE) THEN
                    dotproduct=0_INTG
                    DO i=1,distributedvectora%CMISS%DATA_SIZE
                      dotproduct=dotproduct+(distributedvectora%CMISS%DATA_INTG(i)*distributedvectorb%CMISS%DATA_INTG(i))
                    ENDDO !i
                  ELSE
                    CALL flagerror("Input distributed vector data type does not match output.",err,error,*999)
                  ENDIF
                ELSE
                  CALL flagerror("The distributed vectors do not have the same size.",err,error,*999)
                ENDIF
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              CALL flagerror("The PETSc distributed vector is double precision; the output scalar should be double precision.", &
                & err,error,*999)
            CASE DEFAULT
              localerror="The distributed vector library type of "// &
                & trim(numbertovstring(distributedvectora%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(localerror,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("The distributed vectors do not have the same data type.",err,error,*999)
          ENDIF
        ELSE
          CALL flagerror("The distributed vectors do not have the same library type.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DistributedVector_VecDotIntg")
    RETURN
999 errorsexits("DistributedVector_VecDotIntg",err,error)
    RETURN 1
  !DistributedVector_VecDotSp: calculates the dot product of two distributed single-precision vectors on this computational node.
    REAL(SP), INTENT(OUT) :: dotProduct
    INTEGER(INTG), INTENT(OUT) :: err
    INTEGER(INTG) :: dataTypeA,dataTypeB,i

    enters("DistributedVector_VecDotSp",err,error,*999)
    !The argument checks and error branches mirror DistributedVector_VecDotIntg above (the PETSc branch again flags
    !an error, since PETSc distributed vectors are double precision). For the CMISS library type the accumulation
    !is performed on the single-precision data:
    ! ...
                    dotproduct=0.0_SP
                    DO i=1,distributedvectora%CMISS%DATA_SIZE
                      dotproduct=dotproduct+(distributedvectora%CMISS%DATA_SP(i)*distributedvectorb%CMISS%DATA_SP(i))
                    ENDDO !i
    ! ...
    exits("DistributedVector_VecDotSp")
    RETURN
999 errorsexits("DistributedVector_VecDotSp",err,error)
    RETURN 1
  !DistributedVector_VecDotDp: calculates the dot product of two distributed double-precision vectors on this computational node.
    REAL(DP), INTENT(OUT) :: dotProduct
    INTEGER(INTG), INTENT(OUT) :: err
    INTEGER(INTG) :: dataTypeA,dataTypeB,i

    enters("DistributedVector_VecDotDp",err,error,*999)

    IF(ASSOCIATED(distributedvectora).AND.ASSOCIATED(distributedvectorb)) THEN
      IF(distributedvectora%VECTOR_FINISHED.AND.distributedvectorb%VECTOR_FINISHED) THEN
        IF(distributedvectora%LIBRARY_TYPE==distributedvectorb%LIBRARY_TYPE) THEN
          ! ... (get dataTypeA and dataTypeB from the two vectors)
          IF(datatypea==datatypeb) THEN
            SELECT CASE(distributedvectora%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributedvectora%CMISS)) THEN
                IF(distributedvectora%CMISS%DATA_SIZE==distributedvectorb%CMISS%DATA_SIZE) THEN
                  IF(datatypea==MATRIX_VECTOR_DP_TYPE) THEN
                    dotproduct=0.0_DP
                    DO i=1,distributedvectora%CMISS%DATA_SIZE
                      dotproduct=dotproduct+(distributedvectora%CMISS%DATA_DP(i)*distributedvectorb%CMISS%DATA_DP(i))
                    ENDDO !i
                  ELSE
                    CALL flagerror("Input distributed vector data type does not match output.",err,error,*999)
                  ENDIF
                ELSE
                  CALL flagerror("The distributed vectors do not have the same size.",err,error,*999)
                ENDIF
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              IF(ASSOCIATED(distributedvectora%PETSC)) THEN
                CALL petsc_vecdot(distributedvectora%PETSC%VECTOR,distributedvectorb%PETSC%VECTOR, &
                  & dotproduct,err,error,*999)
              ELSE
                CALL flagerror("Distributed vector PETSC is not associated.",err,error,*999)
              ENDIF
            CASE DEFAULT
              localerror="The distributed vector library type of "// &
                & trim(numbertovstring(distributedvectora%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(localerror,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("The distributed vectors do not have the same data type.",err,error,*999)
          ENDIF
        ELSE
          CALL flagerror("The distributed vectors do not have the same library type.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DistributedVector_VecDotDp")
    RETURN
999 errorsexits("DistributedVector_VecDotDp",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_ADD_INTG: adds values to an integer CMISS distributed vector at the given indices.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    INTEGER(INTG), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(SIZE(indices,1)==SIZE(values,1)) THEN
          IF(distributed_vector%DATA_TYPE==MATRIX_VECTOR_INTG_TYPE) THEN
            SELECT CASE(distributed_vector%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributed_vector%CMISS)) THEN
                DO i=1,SIZE(indices,1)
                  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
                    distributed_vector%CMISS%DATA_INTG(indices(i))=distributed_vector%CMISS%DATA_INTG(indices(i))+values(i)
                  ELSE
                    local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
                      & " is invalid. The index must be between 1 and "// &
                      & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                ENDDO !i
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              CALL flagerror("Cannot add values for an integer PETSc distributed vector.",err,error,*999)
            CASE DEFAULT
              local_error="The distributed vector library type of "// &
                & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            local_error="The distributed data type of "// &
              & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
              & " does not correspond to the integer data type of the given values."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ELSE
          local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
            & ") does not conform to the size of the values array ("// &
            & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_ADD_INTG1: adds one value to an integer CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    INTEGER(INTG), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_INTG1",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_ADD_INTG above, with the data type
    !message referring to "the given value". For the CMISS library type the single value is accumulated directly:
    ! ...
              IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
                distributed_vector%CMISS%DATA_INTG(index)=distributed_vector%CMISS%DATA_INTG(index)+VALUE
              ELSE
                local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
                  & " is invalid. The index must be between 1 and "// &
                  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_ADD_SP: adds values to a single-precision CMISS distributed vector at the given indices.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    REAL(SP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_SP",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_ADD_INTG above, with the messages
    !referring to the single precision data type ("Cannot add values for a single precision PETSc distributed
    !vector." for the PETSc library type). For the CMISS library type each valid index is accumulated:
    ! ...
                    distributed_vector%CMISS%DATA_SP(indices(i))=distributed_vector%CMISS%DATA_SP(indices(i))+values(i)
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_ADD_SP")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_SP",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_ADD_SP1: adds one value to a single-precision CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    REAL(SP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_SP1",err,error,*999)
    !As above, for a single index:
    ! ...
                distributed_vector%CMISS%DATA_SP(index)=distributed_vector%CMISS%DATA_SP(index)+VALUE
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_ADD_SP1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_SP1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_ADD_DP: adds values to a double-precision distributed vector at the given indices.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    REAL(DP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_DP",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(SIZE(indices,1)==SIZE(values,1)) THEN
          IF(distributed_vector%DATA_TYPE==MATRIX_VECTOR_DP_TYPE) THEN
            SELECT CASE(distributed_vector%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributed_vector%CMISS)) THEN
                DO i=1,SIZE(indices,1)
                  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
                    distributed_vector%CMISS%DATA_DP(indices(i))=distributed_vector%CMISS%DATA_DP(indices(i))+values(i)
                  ELSE
                    local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
                      & " is invalid. The index must be between 1 and "// &
                      & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                ENDDO !i
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              IF(ASSOCIATED(distributed_vector%PETSC)) THEN
                IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
                  CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,SIZE(indices,1),distributed_vector%PETSC% &
                    & global_numbers(indices),values,petsc_add_values,err,error,*999)
                ELSE
                  CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,SIZE(indices,1),distributed_vector%PETSC% &
                    & global_numbers(indices),values,petsc_add_values,err,error,*999)
                ENDIF
              ELSE
                CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
              ENDIF
            CASE DEFAULT
              local_error="The distributed vector library type of "// &
                & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            local_error="The distributed data type of "// &
              & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
              & " does not correspond to the double precision data type of the given values."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ELSE
          local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
            & ") does not conform to the size of the values array ("// &
            & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_ADD_DP")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_DP",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_ADD_DP1: adds one value to a double-precision distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    REAL(DP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    REAL(DP) :: PETSC_VALUE(1)

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_DP1",err,error,*999)
    !The checks mirror DISTRIBUTED_VECTOR_VALUES_ADD_DP above. The CMISS branch accumulates the single value,
    !    distributed_vector%CMISS%DATA_DP(index)=distributed_vector%CMISS%DATA_DP(index)+VALUE
    !while the PETSc branch adds it through petsc_vecsetvalues:
    ! ...
              petsc_value(1)=VALUE
              IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
                CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,1,distributed_vector%PETSC%GLOBAL_NUMBERS(index), &
                  & petsc_value,petsc_add_values,err,error,*999)
              ELSE
                CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,1,distributed_vector%PETSC%GLOBAL_NUMBERS(index), &
                  & petsc_value,petsc_add_values,err,error,*999)
              ENDIF
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_ADD_DP1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_DP1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_ADD_L: adds (logically ORs) values into a logical CMISS distributed vector at the given indices.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    LOGICAL, INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_L",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_ADD_INTG above, with the messages
    !referring to the logical data type ("Cannot add values for a logical PETSc distributed vector." for the
    !PETSc library type). For the CMISS library type the "addition" of a logical value is a logical OR:
    ! ...
                    distributed_vector%CMISS%DATA_L(indices(i))=distributed_vector%CMISS%DATA_L(indices(i)).OR.values(i)
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_ADD_L")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_L",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_ADD_L1: adds (logically ORs) one value into a logical CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    LOGICAL, INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_ADD_L1",err,error,*999)
    !As above, for a single index:
    ! ...
                distributed_vector%CMISS%DATA_L(index)=distributed_vector%CMISS%DATA_L(index).OR.VALUE
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_ADD_L1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_L1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_GET_INTG: gets the values at the given indices from an integer CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    INTEGER(INTG), INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_GET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(SIZE(indices,1)==SIZE(values,1)) THEN
          IF(distributed_vector%DATA_TYPE==MATRIX_VECTOR_INTG_TYPE) THEN
            SELECT CASE(distributed_vector%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributed_vector%CMISS)) THEN
                DO i=1,SIZE(indices,1)
                  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
                    values(i)=distributed_vector%CMISS%DATA_INTG(indices(i))
                  ELSE
                    local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
                      & " is invalid. The index must be between 1 and "// &
                      & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                ENDDO !i
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              CALL flagerror("Cannot get values for an integer PETSc distributed vector.",err,error,*999)
            CASE DEFAULT
              local_error="The distributed vector library type of "// &
                & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            local_error="The distributed data type of "// &
              & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
              & " does not correspond to the integer data type of the given values."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ELSE
          local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
            & ") does not conform to the size of the values array ("// &
            & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_GET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_INTG",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_GET_INTG1: gets one value from an integer CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    INTEGER(INTG), INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_GET_INTG1",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_GET_INTG above. For the CMISS
    !library type the single value is read directly:
    ! ...
              IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
                VALUE=distributed_vector%CMISS%DATA_INTG(index)
              ELSE
                local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
                  & " is invalid. The index must be between 1 and "// &
                  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_GET_INTG1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_INTG1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_GET_SP: gets the values at the given indices from a single-precision CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    REAL(SP), INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_GET_SP",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_GET_INTG above, with the messages
    !referring to the single precision data type ("Cannot get values for a single precision PETSc distributed
    !vector." for the PETSc library type). For the CMISS library type each valid index is read directly:
    ! ...
                    values(i)=distributed_vector%CMISS%DATA_SP(indices(i))
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_GET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_SP",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_GET_SP1: gets one value from a single-precision CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    REAL(SP), INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_GET_SP1",err,error,*999)
    !As above, for a single index:
    ! ...
                VALUE=distributed_vector%CMISS%DATA_SP(index)
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_GET_SP1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_SP1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_GET_DP: gets the values at the given indices from a double-precision distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    REAL(DP), INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: i,PETSC_INDICES(SIZE(indices,1))

    enters("DISTRIBUTED_VECTOR_VALUES_GET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(SIZE(indices,1)==SIZE(values,1)) THEN
          IF(distributed_vector%DATA_TYPE==MATRIX_VECTOR_DP_TYPE) THEN
            SELECT CASE(distributed_vector%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributed_vector%CMISS)) THEN
                DO i=1,SIZE(indices,1)
                  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
                    values(i)=distributed_vector%CMISS%DATA_DP(indices(i))
                  ELSE
                    local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
                      & " is invalid. The index must be between 1 and "// &
                      & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                ENDDO !i
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              IF(ASSOCIATED(distributed_vector%PETSC)) THEN
                !Convert the local indices to zero-based PETSc global indices.
                DO i=1,SIZE(indices,1)
                  petsc_indices(i)=distributed_vector%DOMAIN_MAPPING%LOCAL_TO_GLOBAL_MAP(indices(i))-1
                ENDDO !i
                IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
                  CALL petsc_vecgetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,SIZE(indices,1),petsc_indices,values, &
                    & err,error,*999)
                ELSE
                  CALL petsc_vecgetvalues(distributed_vector%PETSC%VECTOR,SIZE(indices,1),petsc_indices,values,err,error,*999)
                ENDIF
              ELSE
                CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
              ENDIF
            CASE DEFAULT
              local_error="The distributed vector library type of "// &
                & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            local_error="The distributed data type of "// &
              & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
              & " does not correspond to the double precision data type of the given values."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ELSE
          local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
            & ") does not conform to the size of the values array ("// &
            & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_GET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_DP",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_GET_DP1: gets one value from a double-precision distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    REAL(DP), INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: PETSC_INDEX(1)
    REAL(DP) :: PETSC_VALUE(1)

    enters("DISTRIBUTED_VECTOR_VALUES_GET_DP1",err,error,*999)
    !The checks mirror DISTRIBUTED_VECTOR_VALUES_GET_DP above. The CMISS branch reads the value directly,
    !    VALUE=distributed_vector%CMISS%DATA_DP(index)
    !while the PETSc branch fetches it through petsc_vecgetvalues:
    ! ...
              petsc_index=distributed_vector%DOMAIN_MAPPING%LOCAL_TO_GLOBAL_MAP(index)-1
              IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
                CALL petsc_vecgetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,1,petsc_index,petsc_value,err,error,*999)
              ELSE
                CALL petsc_vecgetvalues(distributed_vector%PETSC%VECTOR,1,petsc_index,petsc_value,err,error,*999)
              ENDIF
              VALUE=petsc_value(1)
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_GET_DP1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_DP1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_GET_L: gets the values at the given indices from a logical CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    LOGICAL, INTENT(OUT) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_GET_L",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_GET_INTG above, with the messages
    !referring to the logical data type ("Cannot get values for a logical PETSc distributed vector." for the
    !PETSc library type). For the CMISS library type each valid index is read directly:
    ! ...
                    values(i)=distributed_vector%CMISS%DATA_L(indices(i))
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_GET_L")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_L",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_GET_L1: gets one value from a logical CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    LOGICAL, INTENT(OUT) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_GET_L1",err,error,*999)
    !As above, for a single index:
    ! ...
                VALUE=distributed_vector%CMISS%DATA_L(index)
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_GET_L1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_L1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_SET_INTG: sets the values at the given indices of an integer CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    INTEGER(INTG), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_SET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(SIZE(indices,1)==SIZE(values,1)) THEN
          IF(distributed_vector%DATA_TYPE==MATRIX_VECTOR_INTG_TYPE) THEN
            SELECT CASE(distributed_vector%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributed_vector%CMISS)) THEN
                DO i=1,SIZE(indices,1)
                  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
                    distributed_vector%CMISS%DATA_INTG(indices(i))=values(i)
                  ELSE
                    local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
                      & " is invalid. The index must be between 1 and "// &
                      & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                ENDDO !i
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              CALL flagerror("Cannot set values for an integer PETSc distributed vector.",err,error,*999)
            CASE DEFAULT
              local_error="The distributed vector library type of "// &
                & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            local_error="The distributed data type of "// &
              & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
              & " does not correspond to the integer data type of the given values."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ELSE
          local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
            & ") does not conform to the size of the values array ("// &
            & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_SET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_INTG",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_SET_INTG1: sets one value of an integer CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    INTEGER(INTG), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_SET_INTG1",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_SET_INTG above. For the CMISS
    !library type the single value is assigned directly:
    ! ...
              IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
                distributed_vector%CMISS%DATA_INTG(index)=VALUE
              ELSE
                local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
                  & " is invalid. The index must be between 1 and "// &
                  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_SET_INTG1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_INTG1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_SET_SP: sets the values at the given indices of a single-precision CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    REAL(SP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_SET_SP",err,error,*999)
    !The argument checks and error branches mirror DISTRIBUTED_VECTOR_VALUES_SET_INTG above, with the messages
    !referring to the single precision data type ("Cannot set values for a single precision PETSc distributed
    !vector." for the PETSc library type). For the CMISS library type each valid index is assigned directly:
    ! ...
                    distributed_vector%CMISS%DATA_SP(indices(i))=values(i)
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_SET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_SP",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_SET_SP1: sets one value of a single-precision CMISS distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    REAL(SP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_SET_SP1",err,error,*999)
    !As above, for a single index:
    ! ...
                distributed_vector%CMISS%DATA_SP(index)=VALUE
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_SET_SP1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_SP1",err,error)
    RETURN 1
  !DISTRIBUTED_VECTOR_VALUES_SET_DP: sets the values at the given indices of a double-precision distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDICES(:)
    REAL(DP), INTENT(IN) :: VALUES(:)
    INTEGER(INTG), INTENT(OUT) :: ERR

    enters("DISTRIBUTED_VECTOR_VALUES_SET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(SIZE(indices,1)==SIZE(values,1)) THEN
          IF(distributed_vector%DATA_TYPE==MATRIX_VECTOR_DP_TYPE) THEN
            SELECT CASE(distributed_vector%LIBRARY_TYPE)
            CASE(DISTRIBUTED_MATRIX_VECTOR_CMISS_TYPE)
              IF(ASSOCIATED(distributed_vector%CMISS)) THEN
                DO i=1,SIZE(indices,1)
                  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
                    distributed_vector%CMISS%DATA_DP(indices(i))=values(i)
                  ELSE
                    local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
                      & " is invalid. The index must be between 1 and "// &
                      & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                ENDDO !i
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE)
              IF(ASSOCIATED(distributed_vector%PETSC)) THEN
                IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
                  CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,SIZE(indices,1),distributed_vector%PETSC% &
                    & global_numbers(indices),values,petsc_insert_values,err,error,*999)
                ELSE
                  CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,SIZE(indices,1),distributed_vector%PETSC%GLOBAL_NUMBERS( &
                    & indices),values,petsc_insert_values,err,error,*999)
                ENDIF
              ELSE
                CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
              ENDIF
            CASE DEFAULT
              local_error="The distributed vector library type of "// &
                & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            local_error="The distributed data type of "// &
              & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
              & " does not correspond to the double precision data type of the given values."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ELSE
          local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
            & ") does not conform to the size of the values array ("// &
            & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_SET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_DP",err,error)
    RETURN 1

  !DISTRIBUTED_VECTOR_VALUES_SET_DP1: sets one value of a double-precision distributed vector.
    INTEGER(INTG), INTENT(IN) :: INDEX
    REAL(DP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    INTEGER(INTG) :: PETSC_INDEX(1)
    REAL(DP) :: PETSC_VALUE(1)

    enters("DISTRIBUTED_VECTOR_VALUES_SET_DP1",err,error,*999)
    !The checks mirror DISTRIBUTED_VECTOR_VALUES_SET_DP above. The CMISS branch assigns the value directly,
    !    distributed_vector%CMISS%DATA_DP(index)=VALUE
    !while the PETSc branch inserts it through petsc_vecsetvalues:
    ! ...
              petsc_index(1)=distributed_vector%PETSC%GLOBAL_NUMBERS(index)
              petsc_value(1)=VALUE
              IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
                CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,1,petsc_index,petsc_value,petsc_insert_values, &
                  & err,error,*999)
              ELSE
                CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,1,petsc_index,petsc_value,petsc_insert_values, &
                  & err,error,*999)
              ENDIF
    ! ...
    exits("DISTRIBUTED_VECTOR_VALUES_SET_DP1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_DP1",err,error)
    RETURN 1
  !>Sets values in a distributed logical vector.
  SUBROUTINE distributed_vector_values_set_l(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)

    !Argument variables
    TYPE(DISTRIBUTED_VECTOR_TYPE), POINTER :: DISTRIBUTED_VECTOR !<A pointer to the distributed vector
    INTEGER(INTG), INTENT(IN) :: INDICES(:) !<INDICES(i). The i'th index to be set
    LOGICAL, INTENT(IN) :: VALUES(:) !<VALUES(i). The i'th value to be set
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    INTEGER(INTG) :: i
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_VECTOR_VALUES_SET_L",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(SIZE(indices,1)==SIZE(values,1)) THEN
          IF(distributed_vector%DATA_TYPE==distributed_matrix_vector_l_type) THEN
            SELECT CASE(distributed_vector%LIBRARY_TYPE)
            CASE(distributed_matrix_vector_cmiss_type)
              IF(ASSOCIATED(distributed_vector%CMISS)) THEN
                DO i=1,SIZE(indices,1)
                  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
                    distributed_vector%CMISS%DATA_L(indices(i))=values(i)
                  ELSE
                    local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
                      & " is invalid. The index must be between 1 and "// &
                      & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                    CALL flagerror(local_error,err,error,*999)
                  ENDIF
                ENDDO !i
              ELSE
                CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
              ENDIF
            CASE(distributed_matrix_vector_petsc_type)
              CALL flagerror("Cannot set values for a logical PETSc distributed vector.",err,error,*999)
            CASE DEFAULT
              local_error="The distributed vector library type of "// &
                & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            local_error="The distributed data type of "// &
              & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
              & " does not correspond to the logical data type of the given values."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ELSE
          local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
            & ") does not conform to the size of the values array ("// &
            & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_SET_L")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_L",err,error)
    RETURN 1
  END SUBROUTINE distributed_vector_values_set_l
  !>Sets one value in a distributed logical vector.
  SUBROUTINE distributed_vector_values_set_l1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)

    !Argument variables
    TYPE(DISTRIBUTED_VECTOR_TYPE), POINTER :: DISTRIBUTED_VECTOR !<A pointer to the distributed vector
    INTEGER(INTG), INTENT(IN) :: INDEX !<The index to be set
    LOGICAL, INTENT(IN) :: VALUE !<The value to be set
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(VARYING_STRING), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(VARYING_STRING) :: local_error

    enters("DISTRIBUTED_VECTOR_VALUES_SET_L1",err,error,*999)

    IF(ASSOCIATED(distributed_vector)) THEN
      IF(distributed_vector%VECTOR_FINISHED) THEN
        IF(distributed_vector%DATA_TYPE==distributed_matrix_vector_l_type) THEN
          SELECT CASE(distributed_vector%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_vector%CMISS)) THEN
              IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
                distributed_vector%CMISS%DATA_L(index)=VALUE
              ELSE
                local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
                  & " is invalid. The index must be between 1 and "// &
                  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
                CALL flagerror(local_error,err,error,*999)
              ENDIF
            ELSE
              CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot set values for a logical PETSc distributed vector.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed vector library type of "// &
              & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          local_error="The distributed data type of "// &
            & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
            & " does not correspond to the logical data type of the given value."
          CALL flagerror(local_error,err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("The distributed vector has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed vector is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_VECTOR_VALUES_SET_L1")
    RETURN
999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_L1",err,error)
    RETURN 1
  END SUBROUTINE distributed_vector_values_set_l1
subroutine distributedvector_vecdotintg(distributedVectorA, distributedVectorB, dotProduct, err, error,)
Calculates the dot product of 2 distributed integer vectors on this computational node...
subroutine distributed_vector_data_restore_intg(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Restores the integer data pointer returned from DISTRIBUTED_VECTOR_DATA_GET once the data has finishe...
subroutine, public enters(NAME, ERR, ERROR,)
Records the entry into the named procedure and initialises the error code.
subroutine, public distributed_matrix_create_finish(DISTRIBUTED_MATRIX, ERR, ERROR,)
Finishes the creation of a distributed matrix.
subroutine distributed_matrix_all_values_set_sp(DISTRIBUTED_MATRIX, VALUE, ERR, ERROR,)
Sets all values in a single precision distributed matrix to the specified value.
subroutine, public distributed_matrix_library_type_set(DISTRIBUTED_MATRIX, LIBRARY_TYPE, ERR, ERROR,)
Sets/changes the library type for a distributed matrix.
subroutine, public distributed_vector_override_set_on(DISTRIBUTED_VECTOR, OVERRIDE_VECTOR, ERR, ERROR,)
Sets the override vector for a distributed vector.
subroutine distributed_matrix_values_add_sp1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed single precision matrix.
integer(intg), parameter, public distributed_matrix_vector_include_ghosts_type
Include ghost values in the distributed matrix/vector.
integer(intg), parameter, public matrix_vector_dp_type
Double precision real matrix-vector data type.
subroutine, public petsc_matseqaijgetarrayf90(a, array, err, error,)
Buffer routine to the PETSc MatSeqAIJGetArrayF90 routine.
subroutine distributed_matrix_values_set_l(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed logical matrix.
Converts a number to its equivalent varying string representation.
subroutine, public distributed_vector_create_start(DOMAIN_MAPPING, DISTRIBUTED_VECTOR, ERR, ERROR,)
Starts the creation of a distributed vector.
This module is a CMISS buffer module to the PETSc library.
subroutine, public petsc_vecsetvalues(x, n, indices, values, insertMode, err, error,)
Buffer routine to the PETSc VecSetValues routine.
subroutine, public distributed_vector_create_finish(DISTRIBUTED_VECTOR, ERR, ERROR,)
Finishes the creation of a distributed vector.
subroutine distributed_matrix_values_set_intg(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed integer matrix.
subroutine distributed_vector_all_values_set_l(DISTRIBUTED_VECTOR, VALUE, ERR, ERROR,)
Sets all values in a logical distributed_vector to the specified value.
subroutine, public distributed_matrix_data_type_set(DISTRIBUTED_MATRIX, DATA_TYPE, ERR, ERROR,)
Sets/changes the data type of a distributed matrix.
subroutine, public petsc_matinitialise(a, err, error,)
subroutine, public distributed_matrix_duplicate(DISTRIBUTED_MATRIX, NEW_DISTRIBUTED_MATRIX, ERR, ERROR,)
Duplicates the structure of a distributed matrix and returns a pointer to the new matrix in NEW_DISTR...
integer(intg), parameter, public distributed_matrix_row_major_storage_type
Distributed matrix row major storage type.
subroutine distributed_matrix_values_set_dp1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed double precision matrix.
subroutine, public distributed_matrix_create_start(ROW_DOMAIN_MAPPING, COLUMN_DOMAIN_MAPPING, DISTRIBUTED_MATRIX, ERR, ERROR,)
Starts the creation of a distributed matrix.
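Together with distributed_matrix_data_type_set, distributed_matrix_storage_type_set, distributed_matrix_number_non_zeros_set, distributed_matrix_storage_locations_set and distributed_matrix_create_finish listed elsewhere in this member list, this suggests the following creation sequence. The sketch is illustrative only: the domain mappings, the matrix pointer and the sparsity arrays are assumed caller-side data, and error handling follows the module's *999 alternate-return convention.

  NULLIFY(matrix)
  CALL distributed_matrix_create_start(rowMapping,columnMapping,matrix,err,error,*999)
  CALL distributed_matrix_data_type_set(matrix,distributed_matrix_vector_dp_type,err,error,*999)
  CALL distributed_matrix_storage_type_set(matrix,distributed_matrix_compressed_row_storage_type,err,error,*999)
  CALL distributed_matrix_number_non_zeros_set(matrix,numberOfNonZeros,err,error,*999)
  CALL distributed_matrix_storage_locations_set(matrix,rowIndices,columnIndices,err,error,*999)
  CALL distributed_matrix_create_finish(matrix,err,error,*999)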
integer(intg), parameter, public matrix_compressed_column_storage_type
Matrix compressed column storage type.
subroutine distributedvector_vecdotsp(distributedVectorA, distributedVectorB, dotProduct, err, error,)
Calculates the dot product of 2 distributed single-precision vectors on this computational node...
subroutine distributed_vector_petsc_initialise(DISTRIBUTED_VECTOR, ERR, ERROR,)
Initialises a PETSc distributed vector.
subroutine, public matrix_max_columns_per_row_get(MATRIX, MAX_COLUMNS_PER_ROW, ERR, ERROR,)
Gets the maximum number of columns in each row of a distributed matrix.
subroutine, public matrix_number_non_zeros_set(MATRIX, NUMBER_NON_ZEROS, ERR, ERROR,)
Sets/changes the number of non zeros for a matrix.
integer(intg), parameter, public distributed_matrix_vector_cmiss_type
CMISS distributed matrix-vector library type.
subroutine, public distributed_matrix_storage_type_set(DISTRIBUTED_MATRIX, STORAGE_TYPE, ERR, ERROR,)
Sets/changes the storage type of a distributed matrix.
subroutine distributed_matrix_values_add_dp1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed double precision matrix.
subroutine distributed_vector_data_get_intg(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Returns a pointer to the data of an integer distributed vector. Note: the values can be used for read...
subroutine distributed_matrix_values_add_sp2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds a matrix of values to a distributed single precision matrix.
subroutine, public matrix_create_finish(MATRIX, ERR, ERROR,)
Finishes the creation of a matrix.
subroutine distributed_vector_cmiss_transfer_finalise(CMISS_VECTOR, domain_idx, ERR, ERROR,)
Finalises a CMISS distributed vector transfer information and deallocates all memory.
subroutine distributed_matrix_values_get_intg(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed integer matrix.
subroutine, public matrix_data_type_set(MATRIX, DATA_TYPE, ERR, ERROR,)
Sets/changes the data type of a matrix.
subroutine, public petsc_matview(a, viewer, err, error,)
Buffer routine to the PETSc MatView routine.
subroutine distributed_matrix_values_add_l1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed logical matrix.
subroutine, public matrix_linklist_get(MATRIX, LIST, ERR, ERROR,)
This module contains all string manipulation and transformation routines.
subroutine distributed_matrix_data_get_l(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Returns a pointer to the data of a logical distributed matrix. Note: the values can be used for read ...
subroutine distributed_matrix_all_values_set_l(DISTRIBUTED_MATRIX, VALUE, ERR, ERROR,)
Sets all values in a logical distributed matrix to the specified value.
subroutine, public distributedmatrix_datatypeget(matrix, dataType, err, error,)
Gets the data type of a distributed matrix.
Contains information for a matrix.
subroutine distributed_vector_values_get_sp(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed single precision vector.
subroutine matrix_storage_locations_get(MATRIX, ROW_INDICES, COLUMN_INDICES, ERR, ERROR,)
Gets the storage locations (sparsity pattern) of a matrix.
subroutine distributed_matrix_all_values_set_intg(DISTRIBUTED_MATRIX, VALUE, ERR, ERROR,)
Sets all values in an integer distributed matrix to the specified value.
subroutine distributed_matrix_all_values_set_dp(DISTRIBUTED_MATRIX, VALUE, ERR, ERROR,)
Sets all values in a double precision distributed matrix to the specified value.
subroutine distributed_vector_values_set_sp1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed single precision vector.
subroutine, public distributed_matrix_update_isfinished(DISTRIBUTED_MATRIX, ISFINISHED, ERR, ERROR,)
Tests to see if a distributed matrix update has finished.
subroutine distributed_vector_values_get_intg(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed integer vector.
subroutine distributed_vector_cmiss_create_finish(CMISS_VECTOR, ERR, ERROR,)
Finishes the creation of a CMISS distributed vector.
subroutine, public distributed_matrix_form(DISTRIBUTED_MATRIX, ERR, ERROR,)
Forms a distributed matrix by initialising the structure of the matrix to zero.
subroutine distributed_vector_values_get_intg1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed integer vector.
subroutine distributed_matrix_cmiss_finalise(CMISS_MATRIX, ERR, ERROR,)
Finalise a CMISS distributed matrix.
subroutine distributed_vector_petsc_finalise(PETSC_VECTOR, ERR, ERROR,)
Finalise a PETSc distributed vector.
subroutine distributed_vector_data_restore_sp(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Restores the single precision data pointer returned from DISTRIBUTED_VECTOR_DATA_GET once the data ha...
subroutine distributed_matrix_values_add_sp(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed single precision matrix.
subroutine distributed_matrix_values_get_dp2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets a matrix of values in a distributed double precision matrix.
subroutine distributed_vector_all_values_set_intg(DISTRIBUTED_VECTOR, VALUE, ERR, ERROR,)
Sets all values in an integer distributed vector to the specified value.
subroutine distributed_matrix_cmiss_create_finish(CMISS_MATRIX, ERR, ERROR,)
Finishes the creation of a CMISS distributed matrix.
integer(intg), parameter, public matrix_vector_intg_type
Integer matrix-vector data type.
subroutine, public petsc_matdenserestorearrayf90(a, array, err, error,)
Buffer routine to the PETSc MatDenseRestoreArrayF90 routine.
subroutine, public matrix_destroy(MATRIX, ERR, ERROR,)
Destroys a matrix.
subroutine distributed_matrix_values_get_l1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed logical matrix.
subroutine distributed_matrix_petsc_create_finish(PETSC_MATRIX, ERR, ERROR,)
Finishes the creation of a PETSc distributed matrix.
subroutine, public petsc_vecfinalise(x, err, error,)
This module provides an iso_varying_string module, conformant to the API specified in ISO/IEC 1539-2:...
subroutine distributed_vector_values_set_sp(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed single precision vector.
subroutine, public distributed_matrix_number_non_zeros_get(DISTRIBUTED_MATRIX, NUMBER_NON_ZEROS, ERR, ERROR,)
Gets the number of non zeros for a distributed matrix.
Only for integer data type for now.
subroutine distributed_vector_values_get_sp1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed single precision vector.
Contains information for a CMISS distributed matrix.
subroutine, public matrix_duplicate(MATRIX, NEW_MATRIX, ERR, ERROR,)
Duplicates the matrix and returns a pointer to the duplicated matrix in NEW_MATRIX.
subroutine, public distributed_matrix_update_start(DISTRIBUTED_MATRIX, ERR, ERROR,)
Starts the update procedure for a distributed matrix.
subroutine distributed_matrix_values_get_intg1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed integer matrix.
subroutine distributed_matrix_values_get_sp2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets a matrix of values in a distributed single precision matrix.
subroutine, public distributedvector_l2norm(distributedVector, norm, err, error,)
Calculates the L2 norm of the distributed vector values on this computational node...
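A hedged usage sketch for the norm and dot-product helpers (distributedvector_l2norm above and distributedvector_vecdotdp further down this list); the vector names and intervening declarations are placeholders.

  REAL(DP) :: norm,dotProduct
  CALL distributedvector_l2norm(residualVector,norm,err,error,*999)
  CALL distributedvector_vecdotdp(residualVector,searchDirection,dotProduct,err,error,*999)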
subroutine distributed_matrix_values_get_l(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed logical matrix.
Contains information for a PETSc distributed vector.
subroutine, public petsc_matassemblyend(A, assemblyType, err, error,)
Buffer routine to the PETSc MatAssemblyEnd routine.
subroutine distributed_matrix_values_get_l2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets a matrix of values in a distributed logical matrix.
subroutine distributed_vector_values_add_intg1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed integer vector.
integer(intg), parameter, public matrix_row_major_storage_type
Matrix row major storage type.
subroutine distributed_vector_values_get_l(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed logical vector.
subroutine distributed_matrix_values_set_dp2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets a matrix of values in a distributed double precision matrix.
subroutine distributed_matrix_values_add_intg2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds a matrix of values to a distributed integer matrix.
integer(intg), parameter, public distributed_matrix_column_major_storage_type
Distributed matrix column major storage type.
subroutine, public petsc_matdensegetarrayf90(a, array, err, error,)
Buffer routine to the PETSc MatDenseGetArrayF90 routine.
subroutine, public petsc_matcreateaij(communicator, localM, localN, globalM, globalN, diagNumberNonZerosPerRow, diagNumberNonZerosEachRow, offDiagNumberNonZerosPerRow, offDiagNumberNonZerosEachRow, a, err, error,)
Buffer routine to the PETSc MatCreateAIJ routine.
subroutine distributed_matrix_values_set_intg1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed integer matrix.
subroutine, public petsc_veccreatempi(communicator, localN, globalN, x, err, error,)
Buffer routine to the PETSc VecCreateMPI routine.
subroutine, public distributed_matrix_max_columns_per_row_get(DISTRIBUTED_MATRIX, MAX_COLUMNS_PER_ROW, ERR, ERROR,)
Gets the maximum number of columns in each row of a distributed matrix.
subroutine distributed_vector_values_add_sp(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed single precision vector.
subroutine distributed_vector_values_get_dp(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed double precision vector.
subroutine distributed_matrix_petsc_finalise(PETSC_MATRIX, ERR, ERROR,)
Finalise a PETSc distributed matrix.
subroutine, public matrix_create_start(MATRIX, ERR, ERROR,)
Starts the creation of a matrix.
subroutine, public distributed_matrix_by_vector_add(ROW_SELECTION_TYPE, ALPHA, DISTRIBUTED_MATRIX, DISTRIBUTED_VECTOR, DISTRIBUTED_PRODUCT, ERR, ERROR,)
Calculates the matrix-vector product of a distributed matrix times a distributed vector and adds it to...
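In other words, the routine appears to accumulate DISTRIBUTED_PRODUCT = DISTRIBUTED_PRODUCT + ALPHA*DISTRIBUTED_MATRIX*DISTRIBUTED_VECTOR over the selected rows. A hedged call sketch follows; the assumption that ROW_SELECTION_TYPE takes one of the ghost-inclusion constants listed on this page, and all variable names, are illustrative.

  CALL distributed_matrix_by_vector_add(distributed_matrix_vector_include_ghosts_type,1.0_DP, &
    & stiffnessMatrix,solutionVector,productVector,err,error,*999)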
logical, save, public diagnostics3
.TRUE. if level 3 diagnostic output is active in the current routine
subroutine, public distributed_matrix_override_set_on(DISTRIBUTED_MATRIX, OVERRIDE_MATRIX, ERR, ERROR,)
Sets the override matrix for a distributed matrix.
subroutine, public distributed_matrix_storage_locations_set(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, ERR, ERROR,)
Sets the storage locations (sparsity pattern) in a distributed matrix to that specified by the row an...
subroutine, public distributed_matrix_linklist_set(DISTRIBUTED_MATRIX, LIST, ERR, ERROR,)
Sets/changes the link list structure for a distributed matrix.
subroutine distributed_vector_data_get_l(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Returns a pointer to the data of a logical distributed vector. Note: the values can be used for read ...
subroutine, public distributed_vector_ghosting_type_set(DISTRIBUTED_VECTOR, GHOSTING_TYPE, ERR, ERROR,)
Sets/changes the ghosting type for a distributed vector.
subroutine, public distributed_matrix_update_waitfinished(DISTRIBUTED_MATRIX, ERR, ERROR,)
Waits until a distributed matrix update has finished.
subroutine distributed_matrix_data_get_intg(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Returns a pointer to the data of an integer distributed matrix. Note: the values can be used for read...
integer(intg), save distributed_data_id
subroutine, public exits(NAME)
Records the exit out of the named procedure.
subroutine distributed_matrix_values_get_dp1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed double precision matrix.
subroutine, public matrix_storage_type_get(MATRIX, STORAGE_TYPE, ERR, ERROR,)
Gets the storage type for a matrix.
subroutine, public petsc_vecset(x, VALUE, err, error,)
Buffer routine to the PETSc VecSet routine.
This module contains all type definitions in order to avoid cyclic module references.
subroutine distributed_matrix_values_add_intg(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed integer matrix.
This module contains all the low-level base routines e.g., all debug, control, and low-level communic...
subroutine, public petsc_matfinalise(a, err, error,)
integer(intg), parameter, public distributed_matrix_vector_intg_type
Integer distributed matrix-vector data type.
subroutine distributed_vector_values_add_l1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed logical vector.
subroutine, public distributed_matrix_destroy(DISTRIBUTED_MATRIX, ERR, ERROR,)
Destroys a distributed matrix.
subroutine distributed_vector_finalise(DISTRIBUTED_VECTOR, ERR, ERROR,)
Finalises a distributed vector and deallocates all memory.
subroutine distributed_vector_all_values_set_sp(DISTRIBUTED_VECTOR, VALUE, ERR, ERROR,)
Sets all values in a single precision distributed vector to the specified value.
subroutine distributed_vector_data_restore_l(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Restores the logical data pointer returned from DISTRIBUTED_VECTOR_DATA_GET once the data has finishe...
subroutine distributed_matrix_values_get_intg2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets a matrix of values in a distributed integer matrix.
subroutine distributedvector_vecdotdp(distributedVectorA, distributedVectorB, dotProduct, err, error,)
Calculates the dot product of 2 distributed double-precision vectors on this computational node...
subroutine, public distributedvector_datatypeget(vector, dataType, err, error,)
Gets the data type of a distributed vector.
integer(intg) function, public computational_nodes_number_get(ERR, ERROR)
Returns the number of computational nodes.
subroutine distributed_matrix_values_set_sp2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets a matrix of values in a distributed single precision matrix.
subroutine distributed_matrix_values_set_sp(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed single precision matrix.
subroutine, public matrix_size_set(MATRIX, M, N, ERR, ERROR,)
Sets/changes the size of a matrix.
This module contains all computational environment variables.
Contains information for a CMISS distributed vector.
subroutine distributed_vector_data_restore_dp(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Restores the double precision data pointer returned from DISTRIBUTED_VECTOR_DATA_GET once the data ha...
subroutine distributed_vector_data_get_sp(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Returns a pointer to the data of a single precision distributed vector. Note: the values can be used ...
subroutine distributed_vector_initialise(DOMAIN_MAPPING, DISTRIBUTED_VECTOR, ERR, ERROR,)
Initialises a distributed vector.
This module contains CMISS MPI routines.
subroutine, public petsc_vecinitialise(x, err, error,)
integer(intg), parameter, public matrix_vector_sp_type
Single precision real matrix-vector data type.
subroutine, public matrix_linklist_set(MATRIX, LIST, ERR, ERROR,)
Sets/changes the link list structure for a matrix.
subroutine distributed_matrix_data_restore_dp(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Restores the double precision data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data ha...
subroutine, public petsc_vecrestorearrayreadf90(x, array, err, error,)
Buffer routine to the PETSc VecRestoreArrayReadF90 routine.
subroutine, public petsc_vecgetvalues(x, n, indices, values, err, error,)
Buffer routine to the PETSc VecGetValues routine.
integer(intg), parameter, public matrix_compressed_row_storage_type
Matrix compressed row storage type.
subroutine distributed_vector_copy_dp(FROM_VECTOR, TO_VECTOR, ALPHA, ERR, ERROR,)
Copies alpha times a double precision distributed vector to another distributed vector.
type(computational_environment_type), target, public computational_environment
The computational environment the program is running in.
integer(intg), parameter, public distributed_matrix_vector_sp_type
Single precision real distributed matrix-vector data type.
subroutine distributed_vector_values_add_sp1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed single precision vector.
subroutine, public distributed_matrix_storage_type_get(DISTRIBUTED_MATRIX, STORAGE_TYPE, ERR, ERROR,)
Gets the storage type of a distributed matrix.
subroutine, public petsc_matseqaijrestorearrayf90(a, array, err, error,)
Buffer routine to the PETSc MatSeqAIJRestoreArrayF90 routine.
subroutine, public distributed_vector_duplicate(DISTRIBUTED_VECTOR, NEW_DISTRIBUTED_VECTOR, ERR, ERROR,)
Duplicates the structure of a distributed vector and returns a pointer to the new distributed vector ...
Contains the information for a vector that is distributed across a number of domains.
subroutine, public distributed_vector_output(ID, DISTRIBUTED_VECTOR, ERR, ERROR,)
Outputs a distributed vector to the specified output ID.
integer(intg), parameter, public matrix_diagonal_storage_type
Matrix diagonal storage type.
subroutine distributed_vector_values_set_dp(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed double precision vector.
subroutine, public distributed_matrix_storage_locations_get(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, ERR, ERROR,)
Gets the storage locations (sparsity pattern) for a distributed matrix.
subroutine, public distributed_vector_override_set_off(DISTRIBUTED_VECTOR, ERR, ERROR,)
Turns off the override vector for a distributed vector.
subroutine distributed_vector_values_set_dp1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed double precision vector.
subroutine distributed_matrix_values_add_dp2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds a matrix of values to a distributed double precision matrix.
logical, save, public diagnostics1
.TRUE. if level 1 diagnostic output is active in the current routine
subroutine distributed_matrix_petsc_initialise(DISTRIBUTED_MATRIX, ERR, ERROR,)
Initialises a PETSc distributed matrix.
This module handles all distributed matrix vector routines.
subroutine distributed_matrix_values_get_sp1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed single precision matrix.
subroutine distributed_matrix_finalise(DISTRIBUTED_MATRIX, ERR, ERROR,)
Finalises a distributed matrix and deallocates all memory.
subroutine, public distributed_vector_update_finish(DISTRIBUTED_VECTOR, ERR, ERROR,)
Finishes the (ghost) update procedure for a distributed vector. This routine will wait until all tran...
integer(intg), parameter, public distributed_matrix_compressed_column_storage_type
Distributed matrix compressed column storage type.
subroutine, public distributed_matrix_linklist_get(DISTRIBUTED_MATRIX, LIST, ERR, ERROR,)
Gets the link list structure for a distributed matrix.
subroutine, public petsc_vecscale(x, alpha, err, error,)
Buffer routine to the PETSc VecScale routine.
subroutine, public petsc_matzeroentries(a, err, error,)
Buffer routine to the PETSc MatZeroEntries routine.
subroutine, public petsc_matgetvalues(a, m, mIndices, n, nIndices, values, err, error,)
Buffer routine to the PETSc MatGetValues routine.
subroutine, public distributed_vector_library_type_set(DISTRIBUTED_VECTOR, LIBRARY_TYPE, ERR, ERROR,)
Sets/changes the library type for a distributed vector.
subroutine distributedvector_cmisstransferinitialise(CMISS_VECTOR, domain_idx, ERR, ERROR,)
Initialises the transfer information for a CMISS distributed vector.
subroutine, public distributed_matrix_output(ID, DISTRIBUTED_MATRIX, ERR, ERROR,)
Outputs a distributed matrix.
subroutine, public distributed_vector_data_type_set(DISTRIBUTED_VECTOR, DATA_TYPE, ERR, ERROR,)
Sets/changes the data type of a distributed vector.
logical, save, public diagnostics5
.TRUE. if level 5 diagnostic output is active in the current routine
subroutine distributed_vector_values_set_l1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed logical vector.
subroutine distributed_vector_values_set_l(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed logical vector.
This module contains all routines dealing with (non-distributed) matrix and vectors types...
integer(intg), parameter, public distributed_matrix_block_storage_type
Distributed matrix block storage type.
subroutine distributed_vector_all_values_set_dp(DISTRIBUTED_VECTOR, VALUE, ERR, ERROR,)
Sets all values in a double precision distributed vector to the specified value.
subroutine, public distributed_vector_destroy(DISTRIBUTED_VECTOR, ERR, ERROR,)
Destroys a distributed vector.
subroutine distributed_vector_values_set_intg1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed integer vector.
subroutine, public petsc_vecassemblyend(x, err, error,)
Buffer routine to the PETSc VecAssemblyEnd routine.
subroutine distributed_matrix_values_add_l(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed logical matrix.
integer(intg), parameter, public matrix_row_column_storage_type
Matrix row-column storage type.
subroutine, public petsc_matcreatedense(communicator, localM, localN, globalM, globalN, matrixData, a, err, error,)
Buffer routine to the PETSc MatCreateDense routine.
subroutine, public petsc_matsetvalue(a, row, col, value, insertMode, err, error,)
Buffer routine to the PETSc MatSetValue routine.
integer(intg), parameter, public diagnostic_output_type
Diagnostic output type.
subroutine, public distributed_vector_update_waitfinished(DISTRIBUTED_VECTOR, ERR, ERROR,)
Waits until a distributed vector update has finished.
subroutine, public matrix_storage_type_set(MATRIX, STORAGE_TYPE, ERR, ERROR,)
Sets/changes the storage type for a matrix.
subroutine, public distributed_vector_update_start(DISTRIBUTED_VECTOR, ERR, ERROR,)
Starts the (ghost) update procedure for a distributed vector.
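The start/finish pair allows the ghost exchange to overlap local computation, and distributed_vector_update_isfinished / distributed_vector_update_waitfinished (listed above) can be used to poll or block in between. A minimal sketch, with the vector and the intervening work assumed:

  CALL distributed_vector_update_start(solutionVector,err,error,*999)
  !... local work that does not read the ghost entries ...
  CALL distributed_vector_update_finish(solutionVector,err,error,*999) !waits for all transfers to complete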
subroutine, public petsc_veccopy(x, y, err, error,)
Buffer routine to the PETSc VecCopy routine.
subroutine, public matrix_storage_locations_set(MATRIX, ROW_INDICES, COLUMN_INDICES, ERR, ERROR,)
Sets the storage locations (sparsity pattern) in a matrix to that specified by the row and column ind...
subroutine, public distributed_vector_update_isfinished(DISTRIBUTED_VECTOR, ISFINISHED, ERR, ERROR,)
Tests to see if a distributed vector update has finished.
Contains information on the domain mappings (i.e., local and global numberings).
subroutine, public petsc_vecassemblybegin(x, err, error,)
Buffer routine to the PETSc VecAssemblyBegin routine.
subroutine, public petsc_vecdot(x, y, dotProduct, err, error,)
Buffer routine to the PETSc VecDot routine.
subroutine distributed_vector_copy_l(FROM_VECTOR, TO_VECTOR, ALPHA, ERR, ERROR,)
Copies alpha times a logical distributed vector to another distributed vector.
Contains information for a PETSc distributed matrix.
integer(intg), parameter, public matrix_vector_l_type
Logical matrix-vector data type.
subroutine distributed_matrix_initialise(ROW_DOMAIN_MAPPING, COLUMN_DOMAIN_MAPPING, DISTRIBUTED_MATRIX, ERR, ERROR,)
Initialises a distributed matrix.
subroutine distributed_matrix_data_restore_l(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Restores the logical data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data has finishe...
subroutine distributed_vector_values_add_intg(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed integer vector.
subroutine distributed_matrix_data_get_sp(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Returns a pointer to the data of a single precision distributed matrix. Note: the values can be used ...
subroutine distributed_matrix_data_restore_sp(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Restores the single precision data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data ha...
subroutine distributed_matrix_data_restore_intg(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Restores the integer data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data has finishe...
subroutine distributed_matrix_cmiss_initialise(DISTRIBUTED_MATRIX, ERR, ERROR,)
Initialises a CMISS distributed matrix.
subroutine, public petsc_matsetoption(a, option, flag, err, error,)
Buffer routine to the PETSc MatSetOption routine.
integer(intg), parameter, public distributed_matrix_vector_dp_type
Double precision real distributed matrix-vector data type.
subroutine distributed_vector_data_get_dp(DISTRIBUTED_VECTOR, DATA, ERR, ERROR,)
Returns a pointer to the data of a double precision distributed vector. Note: the values can be used ...
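The data_get/data_restore pairs bracket direct access to the locally stored values. A hedged sketch for the double precision case; the vector, the pointer name and the use made of the data are placeholders:

  REAL(DP), POINTER :: vectorData(:)
  NULLIFY(vectorData)
  CALL distributed_vector_data_get_dp(myVector,vectorData,err,error,*999)
  localSum=SUM(vectorData) !read-only use of the local data
  CALL distributed_vector_data_restore_dp(myVector,vectorData,err,error,*999)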
Contains the information for a matrix that is distributed across a number of domains.
subroutine distributed_matrix_values_get_dp(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed double precision matrix.
subroutine distributed_matrix_values_get_sp(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Gets values in a distributed single precision matrix.
subroutine distributed_vector_cmiss_finalise(CMISS_VECTOR, ERR, ERROR,)
Finalise a CMISS distributed vector.
subroutine, public matrix_output(ID, MATRIX, ERR, ERROR,)
Outputs a matrix to the specified output ID.
subroutine distributed_vector_values_get_l1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed logical vector.
subroutine distributed_matrix_values_add_l2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds a matrix of values to a distributed logical matrix.
subroutine, public petsc_matassemblybegin(A, assemblyType, err, error,)
Buffer routine to the PETSc MatAssemblyBegin routine.
integer(intg), parameter, public matrix_block_storage_type
Matrix block storage type.
subroutine, public petsc_matsetvalues(a, m, mIndices, n, nIndices, values, insertMode, err, error,)
Buffer routine to the PETSc MatSetValues routine.
integer(intg), parameter, public distributed_matrix_vector_petsc_type
PETSc distributed matrix-vector library type.
subroutine distributed_vector_values_get_dp1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Gets one value in a distributed double precision vector.
subroutine distributed_vector_values_set_intg(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed integer vector.
Flags an error condition.
subroutine distributed_vector_values_add_dp(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed double precision vector.
subroutine distributed_matrix_values_set_sp1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed single precision matrix.
subroutine distributed_matrix_values_set_intg2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets a matrix of values in a distributed integer matrix.
subroutine, public distributedmatrix_dimensionsget(distributedMatrix, m, n, err, error,)
Gets the dimensions of a matrix on this computational node.
integer(intg), parameter, public distributed_matrix_row_column_storage_type
Distributed matrix row-column storage type.
subroutine, public distributed_matrix_update_finish(DISTRIBUTED_MATRIX, ERR, ERROR,)
Finishes the update procedure for a distributed matrix. This routine will wait until all transfers ha...
subroutine, public distributed_matrix_override_set_off(DISTRIBUTED_MATRIX, ERR, ERROR,)
Turns off the override matrix for a distributed matrix.
subroutine distributed_vector_copy_sp(FROM_VECTOR, TO_VECTOR, ALPHA, ERR, ERROR,)
Copies alpha times a single precision distributed vector to another distributed vector.
subroutine, public distributed_matrix_number_non_zeros_set(DISTRIBUTED_MATRIX, NUMBER_NON_ZEROS, ERR, ERROR,)
Sets/changes the number of non zeros for a distributed matrix.
subroutine distributed_matrix_data_get_dp(DISTRIBUTED_MATRIX, DATA, ERR, ERROR,)
Returns a pointer to the data of a double precision distributed matrix. Note: the values can be used ...
subroutine, public petsc_vecgetarrayreadf90(x, array, err, error,)
Buffer routine to the PETSc VecGetArrayReadF90 routine.
integer(intg), parameter, public matrix_column_major_storage_type
Matrix column major storage type.
Flags an error condition.
subroutine, public distributed_matrix_ghosting_type_set(DISTRIBUTED_MATRIX, GHOSTING_TYPE, ERR, ERROR,)
Sets/changes the ghosting type for a distributed matrix.
integer(intg), parameter, public distributed_matrix_vector_no_ghosts_type
Do not include ghost values/rows in the distributed matrix/vector.
subroutine, public matrix_number_non_zeros_get(MATRIX, NUMBER_NON_ZEROS, ERR, ERROR,)
Gets the number of non zeros for a matrix.
subroutine distributed_matrix_values_set_l1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Sets one value in a distributed logical matrix.
subroutine distributed_vector_copy_intg(FROM_VECTOR, TO_VECTOR, ALPHA, ERR, ERROR,)
Copies alpha times an integer distributed vector to another distributed vector.
subroutine distributed_matrix_values_set_dp(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets values in a distributed double precision matrix.
integer(intg) function, public computational_node_number_get(ERR, ERROR)
Returns the number (rank) of the computational node.
subroutine distributed_vector_values_add_l(DISTRIBUTED_VECTOR, INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed logical vector.
subroutine distributed_matrix_values_add_intg1(DISTRIBUTED_MATRIX, ROW_INDEX, COLUMN_INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed integer matrix.
subroutine distributed_vector_values_add_dp1(DISTRIBUTED_VECTOR, INDEX, VALUE, ERR, ERROR,)
Adds one value to a distributed double precision vector.
integer(intg), parameter, public distributed_matrix_diagonal_storage_type
Distributed matrix diagonal storage type.
subroutine distributed_vector_cmiss_initialise(DISTRIBUTED_VECTOR, ERR, ERROR,)
Initialises a CMISS distributed vector.
subroutine distributed_matrix_values_add_dp(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Adds values to a distributed double precision matrix.
This module contains all kind definitions.
subroutine distributed_matrix_values_set_l2(DISTRIBUTED_MATRIX, ROW_INDICES, COLUMN_INDICES, VALUES, ERR, ERROR,)
Sets a matrix of values in a distributed logical matrix.
subroutine distributed_vector_petsc_create_finish(PETSC_VECTOR, ERR, ERROR,)
Finishes the creation of a PETSc distributed vector.
integer(intg), parameter, public distributed_matrix_vector_l_type
Logical distributed matrix-vector data type.
integer(intg), parameter, public distributed_matrix_compressed_row_storage_type
Distributed matrix compressed row storage type.
subroutine, public mpi_error_check(ROUTINE, MPI_ERR_CODE, ERR, ERROR,)
Checks to see if an MPI error has occurred during an MPI call and flags a CMISS error if it has...