OpenCMISS-Iron Internal API Documentation
distributed_matrix_vector.f90
MODULE distributed_matrix_vector

  USE base_routines
  USE cmiss_mpi
  USE cmisspetsc
  USE comp_environment
  USE input_output
  USE iso_c_binding
  USE iso_varying_string
  USE kinds
  USE matrix_vector
#ifndef NOMPIMOD
  USE mpi
#endif
  USE strings
  USE types

#include "macros.h"

  IMPLICIT NONE

  PRIVATE

#ifdef NOMPIMOD
#include "mpif.h"
#endif
#include "petscversion.h"

  !Module parameters

  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_cmiss_type=library_cmiss_type !<CMISS (internal) library type for distributed matrices and vectors
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_petsc_type=library_petsc_type !<PETSc library type for distributed matrices and vectors

  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_intg_type=matrix_vector_intg_type !<Integer distributed matrix-vector data type
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_sp_type=matrix_vector_sp_type !<Single precision real distributed matrix-vector data type
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_dp_type=matrix_vector_dp_type !<Double precision real distributed matrix-vector data type
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_l_type=matrix_vector_l_type !<Logical distributed matrix-vector data type

  INTEGER(INTG), PARAMETER :: distributed_matrix_block_storage_type=matrix_block_storage_type !<Distributed matrix block storage type
  INTEGER(INTG), PARAMETER :: distributed_matrix_diagonal_storage_type=matrix_diagonal_storage_type !<Distributed matrix diagonal storage type
  INTEGER(INTG), PARAMETER :: distributed_matrix_column_major_storage_type=matrix_column_major_storage_type !<Distributed matrix column major storage type
  INTEGER(INTG), PARAMETER :: distributed_matrix_row_major_storage_type=matrix_row_major_storage_type !<Distributed matrix row major storage type
  INTEGER(INTG), PARAMETER :: distributed_matrix_compressed_row_storage_type=matrix_compressed_row_storage_type !<Distributed matrix compressed row storage type
  INTEGER(INTG), PARAMETER :: distributed_matrix_compressed_column_storage_type=matrix_compressed_column_storage_type !<Distributed matrix compressed column storage type
  INTEGER(INTG), PARAMETER :: distributed_matrix_row_column_storage_type=matrix_row_column_storage_type !<Distributed matrix row-column storage type

  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_include_ghosts_type=1 !<Include ghost rows/columns in the local matrix or vector
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_no_ghosts_type=2 !<Do not include ghost rows/columns in the local matrix or vector

  !Module types

  !Module variables

  !>Counter used to hand out unique base MPI tag numbers to distributed objects; the create-finish routines advance it by the number of adjacent domains.
  INTEGER(INTG), SAVE :: distributed_data_id=100000000

  !Interfaces

  !>Sets all values in a distributed matrix to the specified value.
  INTERFACE distributed_matrix_all_values_set
    MODULE PROCEDURE distributed_matrix_all_values_set_intg
    MODULE PROCEDURE distributed_matrix_all_values_set_sp
    MODULE PROCEDURE distributed_matrix_all_values_set_dp
    MODULE PROCEDURE distributed_matrix_all_values_set_l
  END INTERFACE distributed_matrix_all_values_set

  !>Sets all values in a distributed matrix to the specified value.
  INTERFACE distributedmatrix_allvaluesset
    MODULE PROCEDURE distributed_matrix_all_values_set_intg
    MODULE PROCEDURE distributed_matrix_all_values_set_sp
    MODULE PROCEDURE distributed_matrix_all_values_set_dp
    MODULE PROCEDURE distributed_matrix_all_values_set_l
  END INTERFACE distributedmatrix_allvaluesset
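
  !Note: each generic interface below is declared twice: once under the
  !historical DISTRIBUTED_MATRIX_/DISTRIBUTED_VECTOR_ name and once under the
  !newer DistributedMatrix_/DistributedVector_ style name; both resolve to the
  !same module procedures.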

  !>Finishes the creation of a distributed matrix.
  INTERFACE distributedmatrix_createfinish
    MODULE PROCEDURE distributed_matrix_create_finish
  END INTERFACE distributedmatrix_createfinish

  !>Starts the creation of a distributed matrix.
  INTERFACE distributedmatrix_createstart
    MODULE PROCEDURE distributed_matrix_create_start
  END INTERFACE distributedmatrix_createstart

  !>Returns a pointer to the data of a distributed matrix.
  INTERFACE distributed_matrix_data_get
    MODULE PROCEDURE distributed_matrix_data_get_intg
    MODULE PROCEDURE distributed_matrix_data_get_sp
    MODULE PROCEDURE distributed_matrix_data_get_dp
    MODULE PROCEDURE distributed_matrix_data_get_l
  END INTERFACE distributed_matrix_data_get

  !>Returns a pointer to the data of a distributed matrix.
  INTERFACE distributedmatrix_dataget
    MODULE PROCEDURE distributed_matrix_data_get_intg
    MODULE PROCEDURE distributed_matrix_data_get_sp
    MODULE PROCEDURE distributed_matrix_data_get_dp
    MODULE PROCEDURE distributed_matrix_data_get_l
  END INTERFACE distributedmatrix_dataget

  !>Restores the data pointer returned from a data get call once the data has finished being used.
  INTERFACE distributed_matrix_data_restore
    MODULE PROCEDURE distributed_matrix_data_restore_intg
    MODULE PROCEDURE distributed_matrix_data_restore_sp
    MODULE PROCEDURE distributed_matrix_data_restore_dp
    MODULE PROCEDURE distributed_matrix_data_restore_l
  END INTERFACE distributed_matrix_data_restore

  !>Restores the data pointer returned from a data get call once the data has finished being used.
  INTERFACE distributedmatrix_datarestore
    MODULE PROCEDURE distributed_matrix_data_restore_intg
    MODULE PROCEDURE distributed_matrix_data_restore_sp
    MODULE PROCEDURE distributed_matrix_data_restore_dp
    MODULE PROCEDURE distributed_matrix_data_restore_l
  END INTERFACE distributedmatrix_datarestore

  !>Sets/changes the data type of a distributed matrix.
  INTERFACE distributedmatrix_datatypeset
    MODULE PROCEDURE distributed_matrix_data_type_set
  END INTERFACE distributedmatrix_datatypeset

  !>Destroys a distributed matrix.
  INTERFACE distributedmatrix_destroy
    MODULE PROCEDURE distributed_matrix_destroy
  END INTERFACE distributedmatrix_destroy

  !>Duplicates the structure of a distributed matrix.
  INTERFACE distributedmatrix_duplicate
    MODULE PROCEDURE distributed_matrix_duplicate
  END INTERFACE distributedmatrix_duplicate

  !>Forms a distributed matrix.
  INTERFACE distributedmatrix_form
    MODULE PROCEDURE distributed_matrix_form
  END INTERFACE distributedmatrix_form

  !>Sets/changes the ghosting type of a distributed matrix.
  INTERFACE distributedmatrix_ghostingtypeset
    MODULE PROCEDURE distributed_matrix_ghosting_type_set
  END INTERFACE distributedmatrix_ghostingtypeset

  !>Sets/changes the library type of a distributed matrix.
  INTERFACE distributedmatrix_librarytypeset
    MODULE PROCEDURE distributed_matrix_library_type_set
  END INTERFACE distributedmatrix_librarytypeset

  !>Outputs a distributed matrix.
  INTERFACE distributedmatrix_output
    MODULE PROCEDURE distributed_matrix_output
  END INTERFACE distributedmatrix_output

  !>Sets the override matrix on, for a distributed matrix.
  INTERFACE distributedmatrix_overrideseton
    MODULE PROCEDURE distributed_matrix_override_set_on
  END INTERFACE distributedmatrix_overrideseton

  !>Sets the override matrix off, for a distributed matrix.
  INTERFACE distributedmatrix_overridesetoff
    MODULE PROCEDURE distributed_matrix_override_set_off
  END INTERFACE distributedmatrix_overridesetoff

  !>Gets the storage type of a distributed matrix.
  INTERFACE distributedmatrix_storagetypeget
    MODULE PROCEDURE distributed_matrix_storage_type_get
  END INTERFACE distributedmatrix_storagetypeget

  !>Sets/changes the storage type of a distributed matrix.
  INTERFACE distributedmatrix_storagetypeset
    MODULE PROCEDURE distributed_matrix_storage_type_set
  END INTERFACE distributedmatrix_storagetypeset

  !>Finishes the (ghost) update procedure for a distributed matrix.
  INTERFACE distributedmatrix_updatefinish
    MODULE PROCEDURE distributed_matrix_update_finish
  END INTERFACE distributedmatrix_updatefinish

  !>Starts the (ghost) update procedure for a distributed matrix.
  INTERFACE distributedmatrix_updatestart
    MODULE PROCEDURE distributed_matrix_update_start
  END INTERFACE distributedmatrix_updatestart

  !>Tests whether the update procedure for a distributed matrix has finished.
  INTERFACE distributedmatrix_updateisfinished
    MODULE PROCEDURE distributed_matrix_update_isfinished
  END INTERFACE distributedmatrix_updateisfinished

  !>Adds values to a distributed matrix.
  INTERFACE distributed_matrix_values_add
    MODULE PROCEDURE distributed_matrix_values_add_intg
    MODULE PROCEDURE distributed_matrix_values_add_intg1
    MODULE PROCEDURE distributed_matrix_values_add_intg2
    MODULE PROCEDURE distributed_matrix_values_add_sp
    MODULE PROCEDURE distributed_matrix_values_add_sp1
    MODULE PROCEDURE distributed_matrix_values_add_sp2
    MODULE PROCEDURE distributed_matrix_values_add_dp
    MODULE PROCEDURE distributed_matrix_values_add_dp1
    MODULE PROCEDURE distributed_matrix_values_add_dp2
    MODULE PROCEDURE distributed_matrix_values_add_l
    MODULE PROCEDURE distributed_matrix_values_add_l1
    MODULE PROCEDURE distributed_matrix_values_add_l2
  END INTERFACE distributed_matrix_values_add

  !>Adds values to a distributed matrix.
  INTERFACE distributedmatrix_valuesadd
    MODULE PROCEDURE distributed_matrix_values_add_intg
    MODULE PROCEDURE distributed_matrix_values_add_intg1
    MODULE PROCEDURE distributed_matrix_values_add_intg2
    MODULE PROCEDURE distributed_matrix_values_add_sp
    MODULE PROCEDURE distributed_matrix_values_add_sp1
    MODULE PROCEDURE distributed_matrix_values_add_sp2
    MODULE PROCEDURE distributed_matrix_values_add_dp
    MODULE PROCEDURE distributed_matrix_values_add_dp1
    MODULE PROCEDURE distributed_matrix_values_add_dp2
    MODULE PROCEDURE distributed_matrix_values_add_l
    MODULE PROCEDURE distributed_matrix_values_add_l1
    MODULE PROCEDURE distributed_matrix_values_add_l2
  END INTERFACE distributedmatrix_valuesadd

  !>Gets values in a distributed matrix.
  INTERFACE distributed_matrix_values_get
    MODULE PROCEDURE distributed_matrix_values_get_intg
    MODULE PROCEDURE distributed_matrix_values_get_intg1
    MODULE PROCEDURE distributed_matrix_values_get_intg2
    MODULE PROCEDURE distributed_matrix_values_get_sp
    MODULE PROCEDURE distributed_matrix_values_get_sp1
    MODULE PROCEDURE distributed_matrix_values_get_sp2
    MODULE PROCEDURE distributed_matrix_values_get_dp
    MODULE PROCEDURE distributed_matrix_values_get_dp1
    MODULE PROCEDURE distributed_matrix_values_get_dp2
    MODULE PROCEDURE distributed_matrix_values_get_l
    MODULE PROCEDURE distributed_matrix_values_get_l1
    MODULE PROCEDURE distributed_matrix_values_get_l2
  END INTERFACE distributed_matrix_values_get

  !>Gets values in a distributed matrix.
  INTERFACE distributedmatrix_valuesget
    MODULE PROCEDURE distributed_matrix_values_get_intg
    MODULE PROCEDURE distributed_matrix_values_get_intg1
    MODULE PROCEDURE distributed_matrix_values_get_intg2
    MODULE PROCEDURE distributed_matrix_values_get_sp
    MODULE PROCEDURE distributed_matrix_values_get_sp1
    MODULE PROCEDURE distributed_matrix_values_get_sp2
    MODULE PROCEDURE distributed_matrix_values_get_dp
    MODULE PROCEDURE distributed_matrix_values_get_dp1
    MODULE PROCEDURE distributed_matrix_values_get_dp2
    MODULE PROCEDURE distributed_matrix_values_get_l
    MODULE PROCEDURE distributed_matrix_values_get_l1
    MODULE PROCEDURE distributed_matrix_values_get_l2
  END INTERFACE distributedmatrix_valuesget

  !>Sets values in a distributed matrix.
  INTERFACE distributed_matrix_values_set
    MODULE PROCEDURE distributed_matrix_values_set_intg
    MODULE PROCEDURE distributed_matrix_values_set_intg1
    MODULE PROCEDURE distributed_matrix_values_set_intg2
    MODULE PROCEDURE distributed_matrix_values_set_sp
    MODULE PROCEDURE distributed_matrix_values_set_sp1
    MODULE PROCEDURE distributed_matrix_values_set_sp2
    MODULE PROCEDURE distributed_matrix_values_set_dp
    MODULE PROCEDURE distributed_matrix_values_set_dp1
    MODULE PROCEDURE distributed_matrix_values_set_dp2
    MODULE PROCEDURE distributed_matrix_values_set_l
    MODULE PROCEDURE distributed_matrix_values_set_l1
    MODULE PROCEDURE distributed_matrix_values_set_l2
  END INTERFACE distributed_matrix_values_set

  !>Sets values in a distributed matrix.
  INTERFACE distributedmatrix_valuesset
    MODULE PROCEDURE distributed_matrix_values_set_intg
    MODULE PROCEDURE distributed_matrix_values_set_intg1
    MODULE PROCEDURE distributed_matrix_values_set_intg2
    MODULE PROCEDURE distributed_matrix_values_set_sp
    MODULE PROCEDURE distributed_matrix_values_set_sp1
    MODULE PROCEDURE distributed_matrix_values_set_sp2
    MODULE PROCEDURE distributed_matrix_values_set_dp
    MODULE PROCEDURE distributed_matrix_values_set_dp1
    MODULE PROCEDURE distributed_matrix_values_set_dp2
    MODULE PROCEDURE distributed_matrix_values_set_l
    MODULE PROCEDURE distributed_matrix_values_set_l1
    MODULE PROCEDURE distributed_matrix_values_set_l2
  END INTERFACE distributedmatrix_valuesset

  !>Calculates the matrix-vector product of a distributed matrix times a distributed vector and adds it to the distributed product vector.
  INTERFACE distributedmatrix_matrixbyvectoradd
    MODULE PROCEDURE distributed_matrix_by_vector_add
  END INTERFACE distributedmatrix_matrixbyvectoradd

  !>Sets all values in a distributed vector to the specified value.
  INTERFACE distributed_vector_all_values_set
    MODULE PROCEDURE distributed_vector_all_values_set_intg
    MODULE PROCEDURE distributed_vector_all_values_set_sp
    MODULE PROCEDURE distributed_vector_all_values_set_dp
    MODULE PROCEDURE distributed_vector_all_values_set_l
  END INTERFACE distributed_vector_all_values_set

  !>Sets all values in a distributed vector to the specified value.
  INTERFACE distributedvector_allvaluesset
    MODULE PROCEDURE distributed_vector_all_values_set_intg
    MODULE PROCEDURE distributed_vector_all_values_set_sp
    MODULE PROCEDURE distributed_vector_all_values_set_dp
    MODULE PROCEDURE distributed_vector_all_values_set_l
  END INTERFACE distributedvector_allvaluesset

  !>Copies a distributed vector to another distributed vector.
  INTERFACE distributed_vector_copy
    MODULE PROCEDURE distributed_vector_copy_intg
    MODULE PROCEDURE distributed_vector_copy_sp
    MODULE PROCEDURE distributed_vector_copy_dp
    MODULE PROCEDURE distributed_vector_copy_l
  END INTERFACE distributed_vector_copy

  !>Copies a distributed vector to another distributed vector.
  INTERFACE distributedvector_copy
    MODULE PROCEDURE distributed_vector_copy_intg
    MODULE PROCEDURE distributed_vector_copy_sp
    MODULE PROCEDURE distributed_vector_copy_dp
    MODULE PROCEDURE distributed_vector_copy_l
  END INTERFACE distributedvector_copy

  !>Finishes the creation of a distributed vector.
  INTERFACE distributedvector_createfinish
    MODULE PROCEDURE distributed_vector_create_finish
  END INTERFACE distributedvector_createfinish

  !>Starts the creation of a distributed vector.
  INTERFACE distributedvector_createstart
    MODULE PROCEDURE distributed_vector_create_start
  END INTERFACE distributedvector_createstart

  !>Returns a pointer to the data of a distributed vector.
  INTERFACE distributed_vector_data_get
    MODULE PROCEDURE distributed_vector_data_get_intg
    MODULE PROCEDURE distributed_vector_data_get_sp
    MODULE PROCEDURE distributed_vector_data_get_dp
    MODULE PROCEDURE distributed_vector_data_get_l
  END INTERFACE distributed_vector_data_get

  !>Returns a pointer to the data of a distributed vector.
  INTERFACE distributedvector_dataget
    MODULE PROCEDURE distributed_vector_data_get_intg
    MODULE PROCEDURE distributed_vector_data_get_sp
    MODULE PROCEDURE distributed_vector_data_get_dp
    MODULE PROCEDURE distributed_vector_data_get_l
  END INTERFACE distributedvector_dataget

  !>Restores the data pointer returned from a data get call once the data has finished being used.
  INTERFACE distributed_vector_data_restore
    MODULE PROCEDURE distributed_vector_data_restore_intg
    MODULE PROCEDURE distributed_vector_data_restore_sp
    MODULE PROCEDURE distributed_vector_data_restore_dp
    MODULE PROCEDURE distributed_vector_data_restore_l
  END INTERFACE distributed_vector_data_restore

  !>Restores the data pointer returned from a data get call once the data has finished being used.
  INTERFACE distributedvector_datarestore
    MODULE PROCEDURE distributed_vector_data_restore_intg
    MODULE PROCEDURE distributed_vector_data_restore_sp
    MODULE PROCEDURE distributed_vector_data_restore_dp
    MODULE PROCEDURE distributed_vector_data_restore_l
  END INTERFACE distributedvector_datarestore

  !>Destroys a distributed vector.
  INTERFACE distributedvector_destroy
    MODULE PROCEDURE distributed_vector_destroy
  END INTERFACE distributedvector_destroy

  !>Duplicates the structure of a distributed vector.
  INTERFACE distributedvector_duplicate
    MODULE PROCEDURE distributed_vector_duplicate
  END INTERFACE distributedvector_duplicate

  !>Sets/changes the ghosting type of a distributed vector.
  INTERFACE distributedvector_ghostingtypeset
    MODULE PROCEDURE distributed_vector_ghosting_type_set
  END INTERFACE distributedvector_ghostingtypeset

  !>Sets/changes the library type of a distributed vector.
  INTERFACE distributedvector_librarytypeset
    MODULE PROCEDURE distributed_vector_library_type_set
  END INTERFACE distributedvector_librarytypeset

  !>Outputs a distributed vector.
  INTERFACE distributedvector_output
    MODULE PROCEDURE distributed_vector_output
  END INTERFACE distributedvector_output

  !>Sets the override vector on, for a distributed vector.
  INTERFACE distributedvector_overrideseton
    MODULE PROCEDURE distributed_vector_override_set_on
  END INTERFACE distributedvector_overrideseton

  !>Sets the override vector off, for a distributed vector.
  INTERFACE distributedvector_overridesetoff
    MODULE PROCEDURE distributed_vector_override_set_off
  END INTERFACE distributedvector_overridesetoff

  !>Finishes the (ghost) update procedure for a distributed vector.
  INTERFACE distributedvector_updatefinish
    MODULE PROCEDURE distributed_vector_update_finish
  END INTERFACE distributedvector_updatefinish

  !>Starts the (ghost) update procedure for a distributed vector.
  INTERFACE distributedvector_updatestart
    MODULE PROCEDURE distributed_vector_update_start
  END INTERFACE distributedvector_updatestart

  !>Tests whether the update procedure for a distributed vector has finished.
  INTERFACE distributedvector_updateisfinished
    MODULE PROCEDURE distributed_vector_update_isfinished
  END INTERFACE distributedvector_updateisfinished
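
  !Example (sketch, not from this file): the update interfaces appear to follow
  !a start/finish pattern so ghost-value communication can overlap local work:
  !
  !  CALL distributed_vector_update_start(vector,err,error,*999)
  !  !... computation that does not touch ghost entries ...
  !  CALL distributed_vector_update_finish(vector,err,error,*999)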

  !>Adds values to a distributed vector.
  INTERFACE distributed_vector_values_add
    MODULE PROCEDURE distributed_vector_values_add_intg
    MODULE PROCEDURE distributed_vector_values_add_intg1
    MODULE PROCEDURE distributed_vector_values_add_sp
    MODULE PROCEDURE distributed_vector_values_add_sp1
    MODULE PROCEDURE distributed_vector_values_add_dp
    MODULE PROCEDURE distributed_vector_values_add_dp1
    MODULE PROCEDURE distributed_vector_values_add_l
    MODULE PROCEDURE distributed_vector_values_add_l1
  END INTERFACE distributed_vector_values_add

  !>Adds values to a distributed vector.
  INTERFACE distributedvector_valuesadd
    MODULE PROCEDURE distributed_vector_values_add_intg
    MODULE PROCEDURE distributed_vector_values_add_intg1
    MODULE PROCEDURE distributed_vector_values_add_sp
    MODULE PROCEDURE distributed_vector_values_add_sp1
    MODULE PROCEDURE distributed_vector_values_add_dp
    MODULE PROCEDURE distributed_vector_values_add_dp1
    MODULE PROCEDURE distributed_vector_values_add_l
    MODULE PROCEDURE distributed_vector_values_add_l1
  END INTERFACE distributedvector_valuesadd

  !>Gets values in a distributed vector.
  INTERFACE distributed_vector_values_get
    MODULE PROCEDURE distributed_vector_values_get_intg
    MODULE PROCEDURE distributed_vector_values_get_intg1
    MODULE PROCEDURE distributed_vector_values_get_sp
    MODULE PROCEDURE distributed_vector_values_get_sp1
    MODULE PROCEDURE distributed_vector_values_get_dp
    MODULE PROCEDURE distributed_vector_values_get_dp1
    MODULE PROCEDURE distributed_vector_values_get_l
    MODULE PROCEDURE distributed_vector_values_get_l1
  END INTERFACE distributed_vector_values_get

  !>Gets values in a distributed vector.
  INTERFACE distributedvector_valuesget
    MODULE PROCEDURE distributed_vector_values_get_intg
    MODULE PROCEDURE distributed_vector_values_get_intg1
    MODULE PROCEDURE distributed_vector_values_get_sp
    MODULE PROCEDURE distributed_vector_values_get_sp1
    MODULE PROCEDURE distributed_vector_values_get_dp
    MODULE PROCEDURE distributed_vector_values_get_dp1
    MODULE PROCEDURE distributed_vector_values_get_l
    MODULE PROCEDURE distributed_vector_values_get_l1
  END INTERFACE distributedvector_valuesget

  !>Sets values in a distributed vector.
  INTERFACE distributed_vector_values_set
    MODULE PROCEDURE distributed_vector_values_set_intg
    MODULE PROCEDURE distributed_vector_values_set_intg1
    MODULE PROCEDURE distributed_vector_values_set_sp
    MODULE PROCEDURE distributed_vector_values_set_sp1
    MODULE PROCEDURE distributed_vector_values_set_dp
    MODULE PROCEDURE distributed_vector_values_set_dp1
    MODULE PROCEDURE distributed_vector_values_set_l
    MODULE PROCEDURE distributed_vector_values_set_l1
  END INTERFACE distributed_vector_values_set

  !>Sets values in a distributed vector.
  INTERFACE distributedvector_valuesset
    MODULE PROCEDURE distributed_vector_values_set_intg
    MODULE PROCEDURE distributed_vector_values_set_intg1
    MODULE PROCEDURE distributed_vector_values_set_sp
    MODULE PROCEDURE distributed_vector_values_set_sp1
    MODULE PROCEDURE distributed_vector_values_set_dp
    MODULE PROCEDURE distributed_vector_values_set_dp1
    MODULE PROCEDURE distributed_vector_values_set_l
    MODULE PROCEDURE distributed_vector_values_set_l1
  END INTERFACE distributedvector_valuesset

  !>Calculates the dot product of two distributed vectors.
  INTERFACE distributedvector_vecdot
    MODULE PROCEDURE distributedvector_vecdotintg
    MODULE PROCEDURE distributedvector_vecdotsp
    MODULE PROCEDURE distributedvector_vecdotdp
  END INTERFACE distributedvector_vecdot

  PUBLIC distributed_matrix_vector_cmiss_type,distributed_matrix_vector_petsc_type

  PUBLIC distributed_matrix_vector_intg_type,distributed_matrix_vector_sp_type,distributed_matrix_vector_dp_type, &
    & distributed_matrix_vector_l_type

  PUBLIC distributed_matrix_block_storage_type,distributed_matrix_diagonal_storage_type, &
    & distributed_matrix_column_major_storage_type,distributed_matrix_row_major_storage_type, &
    & distributed_matrix_compressed_row_storage_type,distributed_matrix_compressed_column_storage_type, &
    & distributed_matrix_row_column_storage_type

  PUBLIC distributed_matrix_vector_include_ghosts_type,distributed_matrix_vector_no_ghosts_type

  PUBLIC distributed_matrix_all_values_set,distributedmatrix_allvaluesset
  PUBLIC distributedmatrix_createfinish,distributedmatrix_createstart
  PUBLIC distributed_matrix_data_get,distributedmatrix_dataget
  PUBLIC distributed_matrix_data_restore,distributedmatrix_datarestore
  PUBLIC distributedmatrix_datatypeget,distributedmatrix_datatypeset
  PUBLIC distributedmatrix_dimensionsget
  PUBLIC distributedmatrix_destroy,distributedmatrix_duplicate,distributedmatrix_form
  PUBLIC distributedmatrix_ghostingtypeset,distributedmatrix_librarytypeset
  PUBLIC distributedmatrix_output,distributedmatrix_overrideseton,distributedmatrix_overridesetoff
  PUBLIC distributedmatrix_storagetypeget,distributedmatrix_storagetypeset
  PUBLIC distributedmatrix_updatefinish,distributedmatrix_updatestart,distributedmatrix_updateisfinished
  PUBLIC distributed_matrix_values_add,distributedmatrix_valuesadd
  PUBLIC distributed_matrix_values_get,distributedmatrix_valuesget
  PUBLIC distributed_matrix_values_set,distributedmatrix_valuesset
  PUBLIC distributedmatrix_matrixbyvectoradd

  PUBLIC distributed_vector_all_values_set,distributedvector_allvaluesset
  PUBLIC distributed_vector_copy,distributedvector_copy
  PUBLIC distributedvector_createfinish,distributedvector_createstart
  PUBLIC distributed_vector_data_get,distributedvector_dataget
  PUBLIC distributed_vector_data_restore,distributedvector_datarestore
  PUBLIC distributedvector_destroy,distributedvector_duplicate
  PUBLIC distributedvector_ghostingtypeset,distributedvector_librarytypeset
  PUBLIC distributedvector_output,distributedvector_overrideseton,distributedvector_overridesetoff
  PUBLIC distributedvector_updatefinish,distributedvector_updatestart,distributedvector_updateisfinished
  PUBLIC distributed_vector_values_add,distributedvector_valuesadd
  PUBLIC distributed_vector_values_get,distributedvector_valuesget
  PUBLIC distributed_vector_values_set,distributedvector_valuesset
  PUBLIC distributedvector_vecdot

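  !Example (sketch, not from this file): typical distributed matrix lifecycle,
  !assuming row and column domain mappings already exist; names such as
  !row_mapping and column_mapping are illustrative only:
  !
  !  TYPE(distributed_matrix_type), POINTER :: matrix
  !  NULLIFY(matrix)
  !  CALL distributed_matrix_create_start(row_mapping,column_mapping,matrix,err,error,*999)
  !  CALL distributed_matrix_data_type_set(matrix,distributed_matrix_vector_dp_type,err,error,*999)
  !  CALL distributed_matrix_storage_type_set(matrix,distributed_matrix_compressed_row_storage_type,err,error,*999)
  !  CALL distributed_matrix_create_finish(matrix,err,error,*999)
  !  CALL distributed_matrix_values_set(matrix,1,1,42.0_DP,err,error,*999)
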
CONTAINS

  !
  !================================================================================================================================
  !

  !>Sets all values in an integer distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_intg(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    INTEGER(INTG), INTENT(IN) :: VALUE !<The value to set
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          CALL flagerror("Cannot set all values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_intg

  !
  !================================================================================================================================
  !

  !>Sets all values in a single precision distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_sp(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    REAL(SP), INTENT(IN) :: VALUE !<The value to set
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          CALL flagerror("Cannot set all values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_sp

  !
  !================================================================================================================================
  !

  !>Sets all values in a double precision distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_dp(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    REAL(DP), INTENT(IN) :: VALUE !<The value to set
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(abs(VALUE)<=zero_tolerance) THEN
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                CALL petsc_matzeroentries(distributed_matrix%PETSC%OVERRIDE_MATRIX,err,error,*999)
              ELSE
                CALL petsc_matzeroentries(distributed_matrix%PETSC%MATRIX,err,error,*999)
              ENDIF
            ELSE
              CALL flagerror("Not implemented.",err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_dp
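
  !Note: as the PETSc branch above only implements the VALUE==0 case (via
  !petsc_matzeroentries), callers presumably zero a PETSc matrix like so
  !(sketch, not from this file):
  !
  !  CALL distributed_matrix_all_values_set(matrix,0.0_DP,err,error,*999)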

  !
  !================================================================================================================================
  !

  !>Sets all values in a logical distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_l(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    LOGICAL, INTENT(IN) :: VALUE !<The value to set
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          CALL flagerror("Cannot set all values for a logical PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_l

  !
  !================================================================================================================================
  !

  !>Finishes the creation of a CMISS distributed matrix.
  SUBROUTINE distributed_matrix_cmiss_create_finish(CMISS_MATRIX,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX !<A pointer to the CMISS distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(domain_mapping_type), POINTER :: DOMAIN_MAPPING
    INTEGER(INTG) :: DUMMY_ERR
    TYPE(varying_string) :: DUMMY_ERROR

    enters("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(cmiss_matrix)) THEN
      cmiss_matrix%BASE_TAG_NUMBER=distributed_data_id
      domain_mapping=>cmiss_matrix%DISTRIBUTED_MATRIX%ROW_DOMAIN_MAPPING
      IF(ASSOCIATED(domain_mapping)) THEN
        IF(domain_mapping%NUMBER_OF_DOMAINS==1) THEN
          distributed_data_id=distributed_data_id+1
        ELSE
          distributed_data_id=distributed_data_id+ &
            & domain_mapping%ADJACENT_DOMAINS_PTR(domain_mapping%NUMBER_OF_DOMAINS)
        END IF
        CALL matrix_create_finish(cmiss_matrix%MATRIX,err,error,*999)
      ELSE
        CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH")
    RETURN
999 CALL distributed_matrix_cmiss_finalise(cmiss_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_cmiss_create_finish

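  !Note: each CMISS distributed object reserves a contiguous block of MPI tag
  !numbers starting at its BASE_TAG_NUMBER; distributed_data_id advances by one
  !tag per adjacent domain, presumably so concurrent transfers cannot collide.
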
  !
  !================================================================================================================================
  !

  !>Finalises a CMISS distributed matrix and deallocates all memory.
  SUBROUTINE distributed_matrix_cmiss_finalise(CMISS_MATRIX,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX !<A pointer to the CMISS distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables

    enters("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error,*999)

    IF(ASSOCIATED(cmiss_matrix)) THEN
      CALL matrix_destroy(cmiss_matrix%MATRIX,err,error,*999)
      DEALLOCATE(cmiss_matrix)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CMISS_FINALISE")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_cmiss_finalise

  !
  !================================================================================================================================
  !

  !>Initialises a CMISS distributed matrix.
  SUBROUTINE distributed_matrix_cmiss_initialise(DISTRIBUTED_MATRIX,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    INTEGER(INTG) :: DUMMY_ERR
    TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
    TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
        CALL flagerror("CMISS is already associated for this distributed matrix.",err,error,*998)
      ELSE
        row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
        column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
        IF(ASSOCIATED(row_domain_mapping)) THEN
          IF(ASSOCIATED(column_domain_mapping)) THEN
            ALLOCATE(distributed_matrix%CMISS,stat=err)
            IF(err/=0) CALL flagerror("Could not allocate CMISS distributed matrix.",err,error,*999)
            distributed_matrix%CMISS%DISTRIBUTED_MATRIX=>distributed_matrix
            distributed_matrix%LIBRARY_TYPE=distributed_matrix_vector_cmiss_type
            NULLIFY(distributed_matrix%CMISS%MATRIX)
            !Set the defaults
            CALL matrix_create_start(distributed_matrix%CMISS%MATRIX,err,error,*999)
            CALL matrix_data_type_set(distributed_matrix%CMISS%MATRIX,matrix_vector_dp_type,err,error,*999)
            CALL matrix_storage_type_set(distributed_matrix%CMISS%MATRIX,matrix_block_storage_type,err,error,*999)
            SELECT CASE(distributed_matrix%GHOSTING_TYPE)
            CASE(distributed_matrix_vector_include_ghosts_type)
              CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
                & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
            CASE(distributed_matrix_vector_no_ghosts_type)
              CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
                & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
            CASE DEFAULT
              local_error="The distributed matrix ghosting type of "// &
                & trim(numbertovstring(distributed_matrix%GHOSTING_TYPE,"*",err,error))//" is invalid."
              CALL flagerror(local_error,err,error,*999)
            END SELECT
          ELSE
            CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
          ENDIF
        ELSE
          CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CMISS_INITIALISE")
    RETURN
999 IF(ASSOCIATED(distributed_matrix%CMISS)) &
      & CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_cmiss_initialise

  !
  !================================================================================================================================
  !

  !>Finishes the creation of a distributed matrix.
  SUBROUTINE distributed_matrix_create_finish(DISTRIBUTED_MATRIX,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    INTEGER(INTG) :: DUMMY_ERR
    TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*998)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          CALL distributed_matrix_cmiss_create_finish(distributed_matrix%CMISS,err,error,*999)
        CASE(distributed_matrix_vector_petsc_type)
          CALL distributed_matrix_petsc_create_finish(distributed_matrix%PETSC,err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
        distributed_matrix%MATRIX_FINISHED=.true.
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CREATE_FINISH")
    RETURN
999 CALL distributed_matrix_finalise(distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_create_finish

  !
  !================================================================================================================================
  !

  !>Starts the creation of a distributed matrix.
  SUBROUTINE distributed_matrix_create_start(ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING,DISTRIBUTED_MATRIX,ERR,ERROR,*)

    !Argument variables
    TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING !<A pointer to the row domain mapping for the matrix
    TYPE(domain_mapping_type), POINTER :: COLUMN_DOMAIN_MAPPING !<A pointer to the column domain mapping for the matrix
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<On return, a pointer to the created distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    INTEGER(INTG) :: DUMMY_ERR
    TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_CREATE_START",err,error,*998)

    IF(ASSOCIATED(row_domain_mapping)) THEN
      IF(ASSOCIATED(column_domain_mapping)) THEN
        IF(ASSOCIATED(distributed_matrix)) THEN
          CALL flagerror("Distributed matrix is already associated.",err,error,*998)
        ELSE
          IF(row_domain_mapping%NUMBER_OF_DOMAINS==column_domain_mapping%NUMBER_OF_DOMAINS) THEN
            CALL distributed_matrix_initialise(row_domain_mapping,column_domain_mapping,distributed_matrix,err,error,*999)
            !Set the defaults
          ELSE
            local_error="The number of domains in the row domain mapping ("// &
              & trim(numbertovstring(row_domain_mapping%NUMBER_OF_DOMAINS,"*",err,error))// &
              & ") does not match the number of domains in the column domain mapping ("// &
              & trim(numbertovstring(column_domain_mapping%NUMBER_OF_DOMAINS,"*",err,error))//")."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ENDIF
      ELSE
        CALL flagerror("Column domain mapping is not associated.",err,error,*998)
      ENDIF
    ELSE
      CALL flagerror("Row domain mapping is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CREATE_START")
    RETURN
999 CALL distributed_matrix_finalise(distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_START",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_create_start

  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of an integer distributed matrix. The pointer must not be deallocated and a data restore call must be made once the data has finished being used.
  SUBROUTINE distributed_matrix_data_get_intg(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    INTEGER(INTG), POINTER :: DATA(:) !<On return, a pointer to the data of the distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot get data for an integer PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_intg

  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of a single precision distributed matrix. The pointer must not be deallocated and a data restore call must be made once the data has finished being used.
  SUBROUTINE distributed_matrix_data_get_sp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    REAL(SP), POINTER :: DATA(:) !<On return, a pointer to the data of the distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot get data for a single precision PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_sp

  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of a double precision distributed matrix. The pointer must not be deallocated and a data restore call must be made once the data has finished being used.
  SUBROUTINE distributed_matrix_data_get_dp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    REAL(DP), POINTER :: DATA(:) !<On return, a pointer to the data of the distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(varying_string) :: LOCAL_ERROR
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdensegetarrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijgetarrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdensegetarrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijgetarrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ENDIF
              !Convert the 2D array from PETSc to a 1D array.
              !Using C_F_POINTER(C_LOC(...)) is a bit ugly, but TRANSFER does not work with pointers.
              SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
              CASE(distributed_matrix_block_storage_type)
                temp_me=c_loc(petscdata(1,1))
                CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%M*distributed_matrix%PETSC%N])
              CASE(distributed_matrix_diagonal_storage_type)
                CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_column_major_storage_type)
                CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_major_storage_type)
                CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_compressed_row_storage_type)
                !PETSc returns an m * n matrix rather than number-of-non-zeros by 1, so the returned
                !2D array actually contains junk data outside of the actual matrix.
                !This is a bug in PETSc but we can get the correct 1D data here.
                temp_me=c_loc(petscdata(1,1))
                CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%NUMBER_NON_ZEROS])
              CASE(distributed_matrix_compressed_column_storage_type)
                CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_column_storage_type)
                CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE DEFAULT
                local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                  & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                CALL flagerror(local_error,err,error,*999)
              END SELECT
            ELSE
              CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
            ENDIF
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_dp

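  !Example (sketch, not from this file): the C_F_POINTER/C_LOC aliasing used
  !above, shown standalone; it reinterprets a contiguous rank-2 array as a
  !rank-1 view without copying (all names here are illustrative only):
  !
  !  PROGRAM flatten_demo
  !    USE iso_c_binding
  !    IMPLICIT NONE
  !    REAL(KIND(1.0D0)), TARGET :: a(2,3)
  !    REAL(KIND(1.0D0)), POINTER :: flat(:)
  !    TYPE(c_ptr) :: p
  !    a=RESHAPE([1.0D0,2.0D0,3.0D0,4.0D0,5.0D0,6.0D0],[2,3])
  !    p=c_loc(a(1,1))
  !    CALL c_f_pointer(p,flat,[SIZE(a)]) !flat now aliases a in column-major order
  !    PRINT *,flat                       !prints 1 2 3 4 5 6
  !  END PROGRAM flatten_demo
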
  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of a logical distributed matrix. The pointer must not be deallocated and a data restore call must be made once the data has finished being used.
  SUBROUTINE distributed_matrix_data_get_l(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    LOGICAL, POINTER :: DATA(:) !<On return, a pointer to the data of the distributed matrix
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_GET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot get data for a logical PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_L",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_l

  !
  !================================================================================================================================
  !

  !>Restores the integer data pointer returned from a data get call once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_intg(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    INTEGER(INTG), POINTER :: DATA(:) !<The data pointer to restore
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot restore data for an integer PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_intg

  !
  !================================================================================================================================
  !

  !>Restores the single precision data pointer returned from a data get call once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_sp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    REAL(SP), POINTER :: DATA(:) !<The data pointer to restore
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot restore data for a single precision PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_sp

  !
  !================================================================================================================================
  !

  !>Restores the double precision data pointer returned from a data get call once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_dp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    REAL(DP), POINTER :: DATA(:) !<The data pointer to restore
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(varying_string) :: LOCAL_ERROR
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
              SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
              CASE(distributed_matrix_block_storage_type)
                !Convert the 1D array back to the 2D array expected by PETSc
                temp_me=c_loc(DATA(1))
                CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
              CASE(distributed_matrix_diagonal_storage_type)
                CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_column_major_storage_type)
                CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_major_storage_type)
                CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_compressed_row_storage_type)
                !PETSc expects an m * n 2D matrix rather than a 1D array with length equal to the number of non-zeros.
                !This is a bug in PETSc so we have to give it a 2D matrix with junk at the end.
                temp_me=c_loc(DATA(1))
                CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
              CASE(distributed_matrix_compressed_column_storage_type)
                CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_column_storage_type)
                CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE DEFAULT
                local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                  & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                CALL flagerror(local_error,err,error,*999)
              END SELECT
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdenserestorearrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijrestorearrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdenserestorearrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijrestorearrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ENDIF
            ELSE
              CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
            ENDIF
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_dp

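  !Example (sketch, not from this file): every data get must be paired with a
  !data restore, and the pointer must not be deallocated by the caller (names
  !here are illustrative only):
  !
  !  REAL(DP), POINTER :: matrixData(:)
  !  NULLIFY(matrixData)
  !  CALL distributed_matrix_data_get(matrix,matrixData,err,error,*999)
  !  !... read-only use of matrixData ...
  !  CALL distributed_matrix_data_restore(matrix,matrixData,err,error,*999)
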
  !
  !================================================================================================================================
  !

  !>Restores the logical data pointer returned from a data get call once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_l(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    LOGICAL, POINTER :: DATA(:) !<The data pointer to restore
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot restore data for a logical PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_l

  !
  !================================================================================================================================
  !

  !>Gets the data type of a distributed matrix.
  SUBROUTINE distributedmatrix_datatypeget(matrix,dataType,err,error,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: matrix !<A pointer to the distributed matrix
    INTEGER(INTG), INTENT(OUT) :: dataType !<On return, the data type of the matrix
    INTEGER(INTG), INTENT(OUT) :: err !<The error code
    TYPE(varying_string), INTENT(OUT) :: error !<The error string

    enters("DistributedMatrix_DataTypeGet",err,error,*999)

    IF(ASSOCIATED(matrix)) THEN
      IF(.NOT.matrix%matrix_finished) THEN
        CALL flag_error("The matrix has not been finished.",err,error,*999)
      ELSE
        datatype=matrix%data_type
      END IF
    ELSE
      CALL flag_error("Distributed matrix is not associated.",err,error,*999)
    END IF

    exits("DistributedMatrix_DataTypeGet")
    RETURN
999 errorsexits("DistributedMatrix_DataTypeGet",err,error)
    RETURN 1
  END SUBROUTINE distributedmatrix_datatypeget

  !
  !================================================================================================================================
  !

  !>Sets/changes the data type of a distributed matrix.
  SUBROUTINE distributed_matrix_data_type_set(DISTRIBUTED_MATRIX,DATA_TYPE,ERR,ERROR,*)

    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX !<A pointer to the distributed matrix
    INTEGER(INTG), INTENT(IN) :: DATA_TYPE !<The data type to set
    INTEGER(INTG), INTENT(OUT) :: ERR !<The error code
    TYPE(varying_string), INTENT(OUT) :: ERROR !<The error string
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has been finished.",err,error,*999)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_data_type_set(distributed_matrix%CMISS%MATRIX,data_type,err,error,*999)
            distributed_matrix%DATA_TYPE=data_type
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          SELECT CASE(data_type)
          CASE(distributed_matrix_vector_intg_type)
            CALL flagerror("An integer distributed PETSc matrix is not implemented.",err,error,*999)
          CASE(distributed_matrix_vector_sp_type)
            CALL flagerror("A single precision distributed PETSc matrix is not implemented.",err,error,*999)
          CASE(distributed_matrix_vector_dp_type)
            distributed_matrix%DATA_TYPE=distributed_matrix_vector_dp_type
          CASE(distributed_matrix_vector_l_type)
            CALL flagerror("A logical distributed PETSc matrix is not implemented.",err,error,*999)
          CASE DEFAULT
            local_error="The specified data type of "//trim(numbertovstring(data_type,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_TYPE_SET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_type_set

  !
  !================================================================================================================================
  !

1745  SUBROUTINE distributedmatrix_dimensionsget(distributedMatrix,m,n,err,error,*)
1747  !Argument variables
1748  TYPE(distributed_matrix_type), POINTER :: distributedMatrix
1749  INTEGER(INTG), INTENT(OUT) :: m
1750  INTEGER(INTG), INTENT(OUT) :: n
1751  INTEGER(INTG), INTENT(OUT) :: err
1752  TYPE(varying_string), INTENT(OUT) :: error
1753  !Local variables
1754  TYPE(matrix_type), POINTER :: matrix
1755  TYPE(distributed_matrix_petsc_type), POINTER :: petscMatrix
1756  TYPE(varying_string) :: localError
1757 
1758  enters("DistributedMatrix_DimensionsGet",err,error,*999)
1759 
1760  IF(ASSOCIATED(distributedmatrix)) THEN
1761  SELECT CASE(distributedmatrix%library_type)
1763  IF(ASSOCIATED(distributedmatrix%cmiss)) THEN
1764  matrix=>distributedmatrix%cmiss%matrix
1765  IF(ASSOCIATED(matrix)) THEN
1766  IF(.NOT.matrix%matrix_finished) THEN
1767  CALL flag_error("The matrix has not been finished.",err,error,*999)
1768  ELSE
1769  m=matrix%m
1770  n=matrix%n
1771  END IF
1772  ELSE
1773  CALL flag_error("Distributed matrix CMISS matrix is not associated.",err,error,*999)
1774  END IF
1775  ELSE
1776  CALL flag_error("Distributed matrix CMISS is not associated.",err,error,*999)
1777  END IF
1778  CASE(distributed_matrix_vector_petsc_type)
1779  petscmatrix=>distributedmatrix%petsc
1780  IF(ASSOCIATED(petscmatrix)) THEN
1781  m=petscmatrix%m
1782  n=petscmatrix%n
1783  ELSE
1784  CALL flag_error("Distributed matrix PETSc is not associated.",err,error,*999)
1785  END IF
1786  CASE DEFAULT
1787  localerror="The distributed matrix library type of "// &
1788  & trim(number_to_vstring(distributedmatrix%library_type,"*",err,error))//" is invalid."
1789  CALL flag_error(localerror,err,error,*999)
1790  END SELECT
1791  ELSE
1792  CALL flag_error("Distributed matrix is not associated.",err,error,*999)
1793  END IF
1794 
1795  exits("DistributedMatrix_DimensionsGet")
1796  RETURN
1797 999 errorsexits("DistributedMatrix_DimensionsGet",err,error)
1798  RETURN 1
1799  END SUBROUTINE distributedmatrix_dimensionsget
1800 
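 !Editor's note: an editorial usage sketch, not part of the original source. It assumes
 !a finished distributed matrix; m and n are the dimensions stored on this computational
 !node, and 999 is the caller's error label.
 !  INTEGER(INTG) :: m,n,err
 !  TYPE(varying_string) :: error
 !  CALL DistributedMatrix_DimensionsGet(matrix,m,n,err,error,*999)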
1801  !
1802  !================================================================================================================================
1803  !
1804 
1806  SUBROUTINE distributed_matrix_destroy(DISTRIBUTED_MATRIX,ERR,ERROR,*)
1808  !Argument variables
1809  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1810  INTEGER(INTG), INTENT(OUT) :: ERR
1811  TYPE(varying_string), INTENT(OUT) :: ERROR
1812  !Local Variables
1813 
1814  enters("DISTRIBUTED_MATRIX_DESTROY",err,error,*999)
1815 
1816  IF(ASSOCIATED(distributed_matrix)) THEN
1817  CALL distributed_matrix_finalise(distributed_matrix,err,error,*999)
1818  ELSE
1819  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
1820  ENDIF
1821 
1822  exits("DISTRIBUTED_MATRIX_DESTROY")
1823  RETURN
1824 999 errorsexits("DISTRIBUTED_MATRIX_DESTROY",err,error)
1825  RETURN 1
1826  END SUBROUTINE distributed_matrix_destroy
1827 
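 !Editor's note: an editorial sketch, not part of the original source. Destroying a
 !matrix finalises both the CMISS and PETSc parts and deallocates the structure itself:
 !  CALL DISTRIBUTED_MATRIX_DESTROY(matrix,err,error,*999)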
1828  !
1829  !================================================================================================================================
1830  !
1831 
1833  SUBROUTINE distributed_matrix_duplicate(DISTRIBUTED_MATRIX,NEW_DISTRIBUTED_MATRIX,ERR,ERROR,*)
1835  !Argument variables
1836  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1837  TYPE(distributed_matrix_type), POINTER :: NEW_DISTRIBUTED_MATRIX
1838  INTEGER(INTG), INTENT(OUT) :: ERR
1839  TYPE(varying_string), INTENT(OUT) :: ERROR
1840  !Local Variables
1841  INTEGER(INTG) :: DUMMY_ERR
1842  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
1843 
1844  enters("DISTRIBUTED_MATRIX_DUPLICATE",err,error,*998)
1845 
1846  IF(ASSOCIATED(distributed_matrix)) THEN
1847  IF(ASSOCIATED(new_distributed_matrix)) THEN
1848  CALL flagerror("New distributed matrix is already associated.",err,error,*998)
1849  ELSE
1850  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
1851  CASE(distributed_matrix_vector_cmiss_type)
1852  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
1853  CALL distributed_matrix_create_start(distributed_matrix%ROW_DOMAIN_MAPPING,distributed_matrix%COLUMN_DOMAIN_MAPPING, &
1854  & new_distributed_matrix,err,error,*999)
1855  CALL matrix_duplicate(distributed_matrix%CMISS%MATRIX,new_distributed_matrix%CMISS%MATRIX,err,error,*999)
1856  CALL distributed_matrix_create_finish(new_distributed_matrix,err,error,*999)
1857  ELSE
1858  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
1859  ENDIF
1860  CASE(distributed_matrix_vector_petsc_type)
1861  CALL distributed_matrix_create_start(distributed_matrix%ROW_DOMAIN_MAPPING,distributed_matrix%COLUMN_DOMAIN_MAPPING, &
1862  & new_distributed_matrix,err,error,*999)
1863  CALL distributed_matrix_library_type_set(new_distributed_matrix,distributed_matrix_vector_petsc_type,err,error,*999)
1864  CALL distributed_matrix_create_finish(new_distributed_matrix,err,error,*999)
1865  CASE DEFAULT
1866  local_error="The distributed matrix library type of "// &
1867  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
1868  CALL flagerror(local_error,err,error,*999)
1869  END SELECT
1870  ENDIF
1871  ELSE
1872  CALL flagerror("Distributed matrix is not associated.",err,error,*998)
1873  ENDIF
1874 
1875  exits("DISTRIBUTED_MATRIX_DUPLICATE")
1876  RETURN
1877 999 CALL distributed_matrix_finalise(new_distributed_matrix,dummy_err,dummy_error,*998)
1878 998 errorsexits("DISTRIBUTED_MATRIX_DUPLICATE",err,error)
1879  RETURN 1
1880  END SUBROUTINE distributed_matrix_duplicate
1881 
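 !Editor's note: an editorial usage sketch, not part of the original source. The new
 !matrix pointer must be unassociated on entry; the duplicate is created over the same
 !row and column domain mappings as the original:
 !  TYPE(distributed_matrix_type), POINTER :: newMatrix
 !  NULLIFY(newMatrix)
 !  CALL DISTRIBUTED_MATRIX_DUPLICATE(matrix,newMatrix,err,error,*999)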
1882  !
1883  !================================================================================================================================
1884  !
1885 
1887  SUBROUTINE distributed_matrix_finalise(DISTRIBUTED_MATRIX,ERR,ERROR,*)
1889  !Argument variables
1890  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1891  INTEGER(INTG), INTENT(OUT) :: ERR
1892  TYPE(varying_string), INTENT(OUT) :: ERROR
1893  !Local Variables
1894 
1895  enters("DISTRIBUTED_MATRIX_FINALISE",err,error,*999)
1896 
1897  IF(ASSOCIATED(distributed_matrix)) THEN
1898  CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,err,error,*999)
1899  CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,err,error,*999)
1900  DEALLOCATE(distributed_matrix)
1901  ENDIF
1902 
1903  exits("DISTRIBUTED_MATRIX_FINALISE")
1904  RETURN
1905 999 errorsexits("DISTRIBUTED_MATRIX_FINALISE",err,error)
1906  RETURN 1
1907  END SUBROUTINE distributed_matrix_finalise
1908 
1909  !
1910  !================================================================================================================================
1911  !
1912 
1914  SUBROUTINE distributed_matrix_form(DISTRIBUTED_MATRIX,ERR,ERROR,*)
1916  !Argument variables
1917  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1918  INTEGER(INTG), INTENT(OUT) :: ERR
1919  TYPE(varying_string), INTENT(OUT) :: ERROR
1920  !Local Variables
1921  INTEGER(INTG) :: column_idx,row
1922  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
1923  TYPE(varying_string) :: LOCAL_ERROR
1924 
1925  enters("DISTRIBUTED_MATRIX_FORM",err,error,*999)
1926 
1927  IF(ASSOCIATED(distributed_matrix)) THEN
1928  IF(distributed_matrix%MATRIX_FINISHED) THEN
1929  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
1930  CASE(distributed_matrix_vector_cmiss_type)
1931  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
1932  CALL flagerror("Not implemented.",err,error,*999)
1933  ELSE
1934  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
1935  ENDIF
1936  CASE(distributed_matrix_vector_petsc_type)
1937  petsc_matrix=>distributed_matrix%PETSC
1938  IF(ASSOCIATED(petsc_matrix)) THEN
1939  SELECT CASE(petsc_matrix%STORAGE_TYPE)
1940  CASE(distributed_matrix_block_storage_type)
1941  CALL petsc_matzeroentries(petsc_matrix%MATRIX,err,error,*999)
1942  CASE(distributed_matrix_diagonal_storage_type)
1943  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
1944  CASE(distributed_matrix_column_major_storage_type)
1945  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
1946  CASE(distributed_matrix_row_major_storage_type)
1947  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
1948  CASE(distributed_matrix_compressed_row_storage_type)
1949  IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
1950  DO row=1,petsc_matrix%M
1951  DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
1952  CALL petsc_matsetvalue(petsc_matrix%OVERRIDE_MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
1953  & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
1954  & err,error,*999) !PETSc uses 0-based indices
1955  ENDDO !column_idx
1956  ENDDO !row
1957  ELSE
1958  DO row=1,petsc_matrix%M
1959  DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
1960  CALL petsc_matsetvalue(petsc_matrix%MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
1961  & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
1962  & err,error,*999) !PETSc uses 0-based indices
1963  ENDDO !column_idx
1964  ENDDO !row
1965  ENDIF
1966  CASE(distributed_matrix_compressed_column_storage_type)
1967  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
1968  CASE(distributed_matrix_row_column_storage_type)
1969  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
1970  CASE DEFAULT
1971  local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
1972  & " is invalid."
1973  CALL flagerror(local_error,err,error,*999)
1974  END SELECT
1975  IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
1976  CALL petsc_matassemblybegin(petsc_matrix%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
1977  CALL petsc_matassemblyend(petsc_matrix%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
1978  ELSE
1979  CALL petsc_matassemblybegin(petsc_matrix%MATRIX,petsc_mat_final_assembly,err,error,*999)
1980  CALL petsc_matassemblyend(petsc_matrix%MATRIX,petsc_mat_final_assembly,err,error,*999)
1981  ENDIF
1982  ELSE
1983  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
1984  ENDIF
1985  CASE DEFAULT
1986  local_error="The distributed matrix library type of "// &
1987  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
1988  CALL flagerror(local_error,err,error,*999)
1989  END SELECT
1990  ELSE
1991  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
1992  ENDIF
1993  ENDIF
1994 
1995  exits("DISTRIBUTED_MATRIX_FORM")
1996  RETURN
1997 999 errorsexits("DISTRIBUTED_MATRIX_FORM",err,error)
1998  RETURN 1
1999  END SUBROUTINE distributed_matrix_form
2000 
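 !Editor's note: an editorial sketch, not part of the original source. For a finished
 !PETSc matrix with compressed row storage, FORM inserts an explicit zero at every
 !stored location and assembles the matrix, so later value insertions hit preallocated
 !entries:
 !  CALL DISTRIBUTED_MATRIX_CREATE_FINISH(matrix,err,error,*999)
 !  CALL DISTRIBUTED_MATRIX_FORM(matrix,err,error,*999)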
2001  !
2002  !================================================================================================================================
2003  !
2004 
2006  SUBROUTINE distributed_matrix_ghosting_type_set(DISTRIBUTED_MATRIX,GHOSTING_TYPE,ERR,ERROR,*)
2008  !Argument variables
2009  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2010  INTEGER(INTG), INTENT(IN) :: GHOSTING_TYPE
2011  INTEGER(INTG), INTENT(OUT) :: ERR
2012  TYPE(varying_string), INTENT(OUT) :: ERROR
2013  !Local Variables
2014  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2015  TYPE(varying_string) :: LOCAL_ERROR
2016 
2017  enters("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error,*999)
2018 
2019  IF(ASSOCIATED(distributed_matrix)) THEN
2020  IF(distributed_matrix%MATRIX_FINISHED) THEN
2021  CALL flagerror("The distributed matrix has already been finished.",err,error,*999)
2022  ELSE
2023  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2024  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2025  IF(ASSOCIATED(row_domain_mapping)) THEN
2026  IF(ASSOCIATED(column_domain_mapping)) THEN
2027  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2028  CASE(distributed_matrix_vector_cmiss_type)
2029  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2030  SELECT CASE(ghosting_type)
2031  CASE(distributed_matrix_vector_include_ghosts_type)
2032  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
2033  & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
2034  CASE(distributed_matrix_vector_no_ghosts_type)
2035  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
2036  & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
2037  CASE DEFAULT
2038  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
2039  CALL flagerror(local_error,err,error,*999)
2040  END SELECT
2041  ELSE
2042  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2043  ENDIF
2044  CASE(distributed_matrix_vector_petsc_type)
2045  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2046  SELECT CASE(ghosting_type)
2047  CASE(distributed_matrix_vector_include_ghosts_type)
2048  distributed_matrix%PETSC%M=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL
2049  CASE(distributed_matrix_vector_no_ghosts_type)
2050  distributed_matrix%PETSC%M=row_domain_mapping%NUMBER_OF_LOCAL
2051  CASE DEFAULT
2052  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
2053  CALL flagerror(local_error,err,error,*999)
2054  END SELECT
2055  ELSE
2056  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2057  ENDIF
2058  CASE DEFAULT
2059  local_error="The distributed matrix library type of "// &
2060  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2061  CALL flagerror(local_error,err,error,*999)
2062  END SELECT
2063  distributed_matrix%GHOSTING_TYPE=ghosting_type
2064  ELSE
2065  CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
2066  ENDIF
2067  ELSE
2068  CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
2069  ENDIF
2070  ENDIF
2071  ELSE
2072  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2073  ENDIF
2074 
2075  exits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET")
2076  RETURN
2077 999 errorsexits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error)
2078  RETURN 1
2079  END SUBROUTINE distributed_matrix_ghosting_type_set
2080 
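 !Editor's note: an editorial usage sketch, not part of the original source. The
 !ghosting type controls whether ghost rows are included in the local row count and can
 !only be changed before the matrix is finished:
 !  CALL DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET(matrix,DISTRIBUTED_MATRIX_VECTOR_NO_GHOSTS_TYPE,err,error,*999)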
2081  !
2082  !================================================================================================================================
2083  !
2084 
2086  SUBROUTINE distributed_matrix_library_type_set(DISTRIBUTED_MATRIX,LIBRARY_TYPE,ERR,ERROR,*)
2088  !Argument variables
2089  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2090  INTEGER(INTG), INTENT(IN) :: LIBRARY_TYPE
2091  INTEGER(INTG), INTENT(OUT) :: ERR
2092  TYPE(varying_string), INTENT(OUT) :: ERROR
2093  !Local Variables
2094  INTEGER(INTG) :: DUMMY_ERR,OLD_LIBRARY_TYPE
2095  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
2096 
2097  enters("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error,*998)
2098 
2099  IF(ASSOCIATED(distributed_matrix)) THEN
2100  IF(distributed_matrix%MATRIX_FINISHED) THEN
2101  CALL flagerror("The distributed matrix has already been finished.",err,error,*998)
2102  ELSE
2103  old_library_type=distributed_matrix%LIBRARY_TYPE
2104  IF(library_type/=old_library_type) THEN
2105  !Initialise the new library type
2106  SELECT CASE(library_type)
2107  CASE(distributed_matrix_vector_cmiss_type)
2108  CALL distributed_matrix_cmiss_initialise(distributed_matrix,err,error,*999)
2109  CASE(distributed_matrix_vector_petsc_type)
2110  CALL distributed_matrix_petsc_initialise(distributed_matrix,err,error,*999)
2111  CASE DEFAULT
2112  local_error="The library type of "//trim(numbertovstring(library_type,"*",err,error))//" is invalid."
2113  CALL flagerror(local_error,err,error,*999)
2114  END SELECT
2115  !Finalise the old library type
2116  SELECT CASE(old_library_type)
2117  CASE(distributed_matrix_vector_cmiss_type)
2118  CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,err,error,*999)
2119  CASE(distributed_matrix_vector_petsc_type)
2120  CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,err,error,*999)
2121  CASE DEFAULT
2122  local_error="The distributed matrix library type of "// &
2123  & trim(numbertovstring(old_library_type,"*",err,error))//" is invalid."
2124  CALL flagerror(local_error,err,error,*999)
2125  END SELECT
2126  distributed_matrix%LIBRARY_TYPE=library_type
2127  ENDIF
2128  ENDIF
2129  ELSE
2130  CALL flagerror("Distributed matrix is not associated.",err,error,*998)
2131  ENDIF
2132 
2133  exits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET")
2134  RETURN
2135 999 SELECT CASE(library_type)
2136  CASE(distributed_matrix_vector_cmiss_type)
2137  CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,dummy_err,dummy_error,*998)
2138  CASE(distributed_matrix_vector_petsc_type)
2139  CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,dummy_err,dummy_error,*998)
2140  END SELECT
2141 998 errorsexits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error)
2142  RETURN 1
2143  END SUBROUTINE distributed_matrix_library_type_set
2144 
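 !Editor's note: an editorial usage sketch, not part of the original source. Switching
 !the library type before the matrix is finished initialises the new backend and
 !finalises the old one:
 !  CALL DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET(matrix,DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE,err,error,*999)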
2145  !
2146  !================================================================================================================================
2147  !
2148 
2150  SUBROUTINE distributed_matrix_initialise(ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING,DISTRIBUTED_MATRIX,ERR,ERROR,*)
2152  !Argument variables
2153  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING
2154  TYPE(domain_mapping_type), POINTER :: COLUMN_DOMAIN_MAPPING
2155  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2156  INTEGER(INTG), INTENT(OUT) :: ERR
2157  TYPE(varying_string), INTENT(OUT) :: ERROR
2158  !Local Variables
2159  INTEGER(INTG) :: DUMMY_ERR
2160  TYPE(varying_string) :: DUMMY_ERROR
2161 
2162  enters("DISTRIBUTED_MATRIX_INITIALISE",err,error,*998)
2163 
2164  IF(ASSOCIATED(row_domain_mapping)) THEN
2165  IF(ASSOCIATED(column_domain_mapping)) THEN
2166  IF(ASSOCIATED(distributed_matrix)) THEN
2167  CALL flagerror("Distributed matrix is already associated.",err,error,*998)
2168  ELSE
2169  ALLOCATE(distributed_matrix,stat=err)
2170  IF(err/=0) CALL flagerror("Could not allocate the distributed matrix.",err,error,*999)
2171  distributed_matrix%MATRIX_FINISHED=.false.
2172  distributed_matrix%LIBRARY_TYPE=0
2173  distributed_matrix%GHOSTING_TYPE=distributed_matrix_vector_include_ghosts_type
2174  distributed_matrix%ROW_DOMAIN_MAPPING=>row_domain_mapping
2175  distributed_matrix%COLUMN_DOMAIN_MAPPING=>column_domain_mapping
2176  distributed_matrix%DATA_TYPE=matrix_vector_dp_type
2177  NULLIFY(distributed_matrix%CMISS)
2178  NULLIFY(distributed_matrix%PETSC)
2179  CALL distributed_matrix_cmiss_initialise(distributed_matrix,err,error,*999)
2180  ENDIF
2181  ELSE
2182  CALL flagerror("Column domain mapping is not associated.",err,error,*999)
2183  ENDIF
2184  ELSE
2185  CALL flagerror("Row domain mapping is not associated.",err,error,*998)
2186  ENDIF
2187 
2188  exits("DISTRIBUTED_MATRIX_INITIALISE")
2189  RETURN
2190 999 CALL distributed_matrix_finalise(distributed_matrix,dummy_err,dummy_error,*998)
2191 998 errorsexits("DISTRIBUTED_MATRIX_INITIALISE",err,error)
2192  RETURN 1
2193  END SUBROUTINE distributed_matrix_initialise
2194 
2195  !
2196  !================================================================================================================================
2197  !
2198 
2200  SUBROUTINE distributed_matrix_max_columns_per_row_get(DISTRIBUTED_MATRIX,MAX_COLUMNS_PER_ROW,ERR,ERROR,*)
2202  !Argument variables
2203  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2204  INTEGER(INTG), INTENT(OUT) :: MAX_COLUMNS_PER_ROW
2205  INTEGER(INTG), INTENT(OUT) :: ERR
2206  TYPE(varying_string), INTENT(OUT) :: ERROR
2207  !Local Variables
2208  TYPE(varying_string) :: LOCAL_ERROR
2209 
2210  enters("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error,*999)
2211 
2212  IF(ASSOCIATED(distributed_matrix)) THEN
2213  IF(distributed_matrix%MATRIX_FINISHED) THEN
2214  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2215  CASE(distributed_matrix_vector_cmiss_type)
2216  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2217  CALL matrix_max_columns_per_row_get(distributed_matrix%CMISS%MATRIX,max_columns_per_row,err,error,*999)
2218  ELSE
2219  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2220  ENDIF
2221  CASE(distributed_matrix_vector_petsc_type)
2222  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2223  max_columns_per_row=distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW
2224  ELSE
2225  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2226  ENDIF
2227  CASE DEFAULT
2228  local_error="The distributed matrix library type of "// &
2229  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2230  CALL flagerror(local_error,err,error,*999)
2231  END SELECT
2232  ELSE
2233  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
2234  ENDIF
2235  ELSE
2236  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2237  ENDIF
2238 
2239  exits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET")
2240  RETURN
2241 999 errorsexits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error)
2242  RETURN 1
2243  END SUBROUTINE distributed_matrix_max_columns_per_row_get
2244 
2245  !
2246  !================================================================================================================================
2247  !
2248 
2250  SUBROUTINE distributed_matrix_number_non_zeros_set(DISTRIBUTED_MATRIX,NUMBER_NON_ZEROS,ERR,ERROR,*)
2252  !Argument variables
2253  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2254  INTEGER(INTG), INTENT(IN) :: NUMBER_NON_ZEROS
2255  INTEGER(INTG), INTENT(OUT) :: ERR
2256  TYPE(varying_string), INTENT(OUT) :: ERROR
2257  !Local Variables
2258  TYPE(varying_string) :: LOCAL_ERROR
2259 
2260  enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error,*999)
2261 
2262  IF(ASSOCIATED(distributed_matrix)) THEN
2263  IF(distributed_matrix%MATRIX_FINISHED) THEN
2264  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
2265  ELSE
2266  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2267  CASE(distributed_matrix_vector_cmiss_type)
2268  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2269  CALL matrix_number_non_zeros_set(distributed_matrix%CMISS%MATRIX,number_non_zeros,err,error,*999)
2270  ELSE
2271  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2272  ENDIF
2273  CASE(distributed_matrix_vector_petsc_type)
2274  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2275  IF(number_non_zeros>0) THEN
2276  distributed_matrix%PETSC%NUMBER_NON_ZEROS=number_non_zeros
2277  ELSE
2278  local_error="The specified number of non zeros ("//trim(numbertovstring(number_non_zeros,"*",err,error))// &
2279  & ") is invalid. The number must be > 0."
2280  CALL flagerror(local_error,err,error,*999)
2281  ENDIF
2282  ELSE
2283  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2284  ENDIF
2285  CASE DEFAULT
2286  local_error="The distributed matrix library type of "// &
2287  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2288  CALL flagerror(local_error,err,error,*999)
2289  END SELECT
2290  ENDIF
2291  ELSE
2292  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2293  ENDIF
2294 
2295  exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET")
2296  RETURN
2297 999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error)
2298  RETURN 1
2299  END SUBROUTINE distributed_matrix_number_non_zeros_set
2300 
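 !Editor's note: an editorial usage sketch, not part of the original source. The number
 !of non-zeros must be positive and must be set before the matrix is finished; for
 !PETSc matrices it sizes the sparsity arrays:
 !  INTEGER(INTG) :: numberNonZeros
 !  numberNonZeros=7
 !  CALL DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET(matrix,numberNonZeros,err,error,*999)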
2301  !
2302  !================================================================================================================================
2303  !
2304 
2306  SUBROUTINE distributed_matrix_number_non_zeros_get(DISTRIBUTED_MATRIX,NUMBER_NON_ZEROS,ERR,ERROR,*)
2308  !Argument variables
2309  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2310  INTEGER(INTG), INTENT(OUT) :: NUMBER_NON_ZEROS
2311  INTEGER(INTG), INTENT(OUT) :: ERR
2312  TYPE(varying_string), INTENT(OUT) :: ERROR
2313  !Local Variables
2314  TYPE(varying_string) :: LOCAL_ERROR
2315 
2316  enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error,*999)
2317 
2318  IF(ASSOCIATED(distributed_matrix)) THEN
2319  IF(distributed_matrix%MATRIX_FINISHED) THEN
2320  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2321  CASE(distributed_matrix_vector_cmiss_type)
2322  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2323  CALL matrix_number_non_zeros_get(distributed_matrix%CMISS%MATRIX,number_non_zeros,err,error,*999)
2324  ELSE
2325  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2326  ENDIF
2327  CASE(distributed_matrix_vector_petsc_type)
2328  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2329  number_non_zeros=distributed_matrix%PETSC%NUMBER_NON_ZEROS
2330  ELSE
2331  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2332  ENDIF
2333  CASE DEFAULT
2334  local_error="The distributed matrix library type of "// &
2335  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2336  CALL flagerror(local_error,err,error,*999)
2337  END SELECT
2338  ELSE
2339  CALL flagerror("The distributed matrix is not finished.",err,error,*999)
2340  ENDIF
2341  ELSE
2342  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2343  ENDIF
2344 
2345  exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET")
2346  RETURN
2347 999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error)
2348  RETURN 1
2349  END SUBROUTINE distributed_matrix_number_non_zeros_get
2350 
2351  !
2352  !================================================================================================================================
2353  !
2356 
2358  SUBROUTINE distributed_matrix_linklist_set(DISTRIBUTED_MATRIX,LIST,ERR,ERROR,*)
2360  !Argument variables
2361  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2362  type(linkedlist),pointer :: list(:)
2363  !INTEGER(INTG), INTENT(IN) :: NUMBER_NON_ZEROS !<The number of non zeros in the matrix to set
2364  INTEGER(INTG), INTENT(OUT) :: ERR
2365  TYPE(varying_string), INTENT(OUT) :: ERROR
2366  !Local Variables
2367  TYPE(varying_string) :: LOCAL_ERROR
2368 
2369  enters("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error,*999)
2370 
2371  IF(ASSOCIATED(distributed_matrix)) THEN
2372  IF(distributed_matrix%MATRIX_FINISHED) THEN
2373  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
2374  ELSE
2375  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2376  CASE(distributed_matrix_vector_cmiss_type)
2377  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2378  CALL matrix_linklist_set(distributed_matrix%CMISS%MATRIX,list,err,error,*999)
2379  !DISTRIBUTED_MATRIX%CMISS%list=list
2380  ELSE
2381  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2382  ENDIF
2383  CASE(distributed_matrix_vector_petsc_type)
2384  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2385  !IF(NUMBER_NON_ZEROS>0) THEN
2386  ! Check this
2387  distributed_matrix%PETSC%list=>list
2388  !ELSE
2389  ! LOCAL_ERROR="The specified number of non zeros ("//TRIM(NumberToVString(NUMBER_NON_ZEROS,"*",ERR,ERROR))// &
2390  ! & ") is invalid. The number must be > 0."
2391  ! CALL FlagError(LOCAL_ERROR,ERR,ERROR,*999)
2392  !ENDIF
2393  ELSE
2394  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2395  ENDIF
2396  CASE DEFAULT
2397  local_error="The distributed matrix library type of "// &
2398  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2399  CALL flagerror(local_error,err,error,*999)
2400  END SELECT
2401  ENDIF
2402  ELSE
2403  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2404  ENDIF
2405 
2406  exits("DISTRIBUTED_MATRIX_LINKLIST_SET")
2407  RETURN
2408 999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error)
2409  RETURN 1
2410  END SUBROUTINE distributed_matrix_linklist_set
2411 
2412  !
2413  !================================================================================================================================
2414  !
2415 
2417  SUBROUTINE distributed_matrix_linklist_get(DISTRIBUTED_MATRIX,LIST,ERR,ERROR,*)
2419  !Argument variables
2420  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2421  type(linkedlist),pointer :: list(:)
2422 ! INTEGER(INTG), INTENT(OUT) :: NUMBER_NON_ZEROS !<On return, the number of non zeros in the matrix to get
2423  INTEGER(INTG), INTENT(OUT) :: ERR
2424  TYPE(varying_string), INTENT(OUT) :: ERROR
2425  !Local Variables
2426  TYPE(varying_string) :: LOCAL_ERROR
2427 
2428  enters("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error,*999)
2429 
2430  IF(ASSOCIATED(distributed_matrix)) THEN
2431  IF(distributed_matrix%MATRIX_FINISHED) THEN
2432  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2433  CASE(distributed_matrix_vector_cmiss_type)
2434  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2435  !list=DISTRIBUTED_MATRIX%CMISS%list
2436  CALL matrix_linklist_get(distributed_matrix%CMISS%MATRIX,list,err,error,*999)
2437  ELSE
2438  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2439  ENDIF
2440  CASE(distributed_matrix_vector_petsc_type)
2441  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2442  list=>distributed_matrix%PETSC%list
2443  ELSE
2444  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2445  ENDIF
2446  CASE DEFAULT
2447  local_error="The distributed matrix library type of "// &
2448  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2449  CALL flagerror(local_error,err,error,*999)
2450  END SELECT
2451  ELSE
2452  CALL flagerror("The distributed matrix is not finished.",err,error,*999)
2453  ENDIF
2454  ELSE
2455  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2456  ENDIF
2457 
2458  exits("DISTRIBUTED_MATRIX_LINKLIST_GET")
2459  RETURN
2460 999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error)
2461  RETURN 1
2462  END SUBROUTINE distributed_matrix_linklist_get
2463 
2464  !
2465  !================================================================================================================================
2466  !
2468  SUBROUTINE distributed_matrix_output(ID,DISTRIBUTED_MATRIX,ERR,ERROR,*)
2470  !Argument variables
2471  INTEGER(INTG), INTENT(IN) :: ID
2472  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2473  INTEGER(INTG), INTENT(OUT) :: ERR
2474  TYPE(varying_string), INTENT(OUT) :: ERROR
2475  !Local Variables
2476 ! INTEGER(INTG) :: i,NUMBER_OF_COLUMNS
2477 ! INTEGER(INTG), ALLOCATABLE :: COLUMNS(:)
2478 ! REAL(DP), ALLOCATABLE :: VALUES(:)
2479 ! CHARACTER(LEN=9) :: ROW_STRING
2480 ! CHARACTER(LEN=39) :: INITIAL_STRING
2481  TYPE(varying_string) :: LOCAL_ERROR
2482 
2483  enters("DISTRIBUTED_MATRIX_OUTPUT",err,error,*999)
2484 
2485  IF(ASSOCIATED(distributed_matrix)) THEN
2486  IF(distributed_matrix%MATRIX_FINISHED) THEN
2487  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2488  CASE(distributed_matrix_vector_cmiss_type)
2489  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2490  CALL matrix_output(id,distributed_matrix%CMISS%MATRIX,err,error,*999)
2491  ELSE
2492  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2493  ENDIF
2494  CASE(distributed_matrix_vector_petsc_type)
2495  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2496  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
2497  CALL petsc_matview(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_viewer_stdout_world,err,error,*999)
2498  ELSE
2499  CALL petsc_matview(distributed_matrix%PETSC%MATRIX,petsc_viewer_stdout_world,err,error,*999)
2500  ENDIF
2501  !ALLOCATE(COLUMNS(DISTRIBUTED_MATRIX%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW),STAT=ERR)
2502  !IF(ERR/=0) CALL FlagError("Could not allocate columns.",ERR,ERROR,*999)
2503  !ALLOCATE(VALUES(DISTRIBUTED_MATRIX%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW),STAT=ERR)
2504  !IF(ERR/=0) CALL FlagError("Could not allocate values.",ERR,ERROR,*999)
2505  !DO i=1,DISTRIBUTED_MATRIX%PETSC%M
2506  ! IF(DISTRIBUTED_MATRIX%PETSC%USE_OVERRIDE_MATRIX) THEN
2507  ! CALL Petsc_MatGetRow(DISTRIBUTED_MATRIX%PETSC%OVERRIDE_MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES, &
2508  ! & ERR,ERROR,*999)
2509  ! ELSE
2510  ! CALL Petsc_MatGetRow(DISTRIBUTED_MATRIX%PETSC%MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES,ERR,ERROR,*999)
2511  ! ENDIF
2512  ! ROW_STRING=NUMBER_TO_CHARACTER(i,"I9",ERR,ERROR)
2513  ! INITIAL_STRING='("Matrix('//ROW_STRING//',:):",8(X,E13.6))'
2514  ! CALL WRITE_STRING_VECTOR(ID,1,1,NUMBER_OF_COLUMNS,8,8,VALUES,INITIAL_STRING,'(20X,8(X,E13.6))', &
2515  ! & ERR,ERROR,*999)
2516  ! IF(DISTRIBUTED_MATRIX%PETSC%USE_OVERRIDE_MATRIX) THEN
2517  ! CALL Petsc_MatRestoreRow(DISTRIBUTED_MATRIX%PETSC%OVERRIDE_MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES, &
2518  ! & ERR,ERROR,*999)
2519  ! ELSE
2520  ! CALL Petsc_MatRestoreRow(DISTRIBUTED_MATRIX%PETSC%MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES,ERR,ERROR,*999)
2521  ! ENDIF
2522  !ENDDO !i
2523  !IF(ALLOCATED(VALUES)) DEALLOCATE(VALUES)
2524  !IF(ALLOCATED(COLUMNS)) DEALLOCATE(COLUMNS)
2525  ELSE
2526  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2527  ENDIF
2528  CASE DEFAULT
2529  local_error="The distributed matrix library type of "// &
2530  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2531  CALL flagerror(local_error,err,error,*999)
2532  END SELECT
2533  ELSE
2534  CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
2535  ENDIF
2536  ELSE
2537  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2538  ENDIF
2539 
2540  exits("DISTRIBUTED_MATRIX_OUTPUT")
2541  RETURN
2542 !999 IF(ALLOCATED(VALUES)) DEALLOCATE(VALUES)
2543 ! IF(ALLOCATED(COLUMNS)) DEALLOCATE(COLUMNS)
2544 999 errorsexits("DISTRIBUTED_MATRIX_OUTPUT",err,error)
2545  RETURN 1
2546  END SUBROUTINE distributed_matrix_output
2547 
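 !Editor's note: an editorial usage sketch, not part of the original source, assuming a
 !finished matrix. The ID selects the output stream; DIAGNOSTIC_OUTPUT_TYPE is the
 !stream used elsewhere in this module:
 !  CALL DISTRIBUTED_MATRIX_OUTPUT(DIAGNOSTIC_OUTPUT_TYPE,matrix,err,error,*999)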
2548  !
2549  !================================================================================================================================
2550  !
2551 
2553  SUBROUTINE distributed_matrix_override_set_on(DISTRIBUTED_MATRIX,OVERRIDE_MATRIX,ERR,ERROR,*)
2555  !Argument variables
2556  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2557  TYPE(petscmattype), INTENT(IN) :: OVERRIDE_MATRIX
2558  INTEGER(INTG), INTENT(OUT) :: ERR
2559  TYPE(varying_string), INTENT(OUT) :: ERROR
2560  !Local Variables
2561  TYPE(varying_string) :: LOCAL_ERROR
2562 
2563  enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error,*999)
2564 
2565  IF(ASSOCIATED(distributed_matrix)) THEN
2566  IF(distributed_matrix%MATRIX_FINISHED) THEN
2567  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2568  CASE(distributed_matrix_vector_cmiss_type)
2569  CALL flagerror("Not implemented.",err,error,*999)
2570  CASE(distributed_matrix_vector_petsc_type)
2571  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2572  distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.true.
2573  distributed_matrix%PETSC%OVERRIDE_MATRIX=override_matrix
2574  ELSE
2575  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2576  ENDIF
2577  CASE DEFAULT
2578  local_error="The distributed matrix library type of "// &
2579  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2580  CALL flagerror(local_error,err,error,*999)
2581  END SELECT
2582  ELSE
2583  CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
2584  ENDIF
2585  ELSE
2586  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2587  ENDIF
2588 
2589  exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON")
2590  RETURN
2591 999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error)
2592  RETURN 1
2593  END SUBROUTINE distributed_matrix_override_set_on
2594 
2595  !
2596  !================================================================================================================================
2597  !
2598 
2600  SUBROUTINE distributed_matrix_override_set_off(DISTRIBUTED_MATRIX,ERR,ERROR,*)
2602  !Argument variables
2603  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2604  INTEGER(INTG), INTENT(OUT) :: ERR
2605  TYPE(varying_string), INTENT(OUT) :: ERROR
2606  !Local Variables
2607  TYPE(varying_string) :: LOCAL_ERROR
2608 
2609  enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error,*999)
2610 
2611  IF(ASSOCIATED(distributed_matrix)) THEN
2612  IF(distributed_matrix%MATRIX_FINISHED) THEN
2613  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2614  CASE(distributed_matrix_vector_cmiss_type)
2615  CALL flagerror("Not implemented.",err,error,*999)
2616  CASE(distributed_matrix_vector_petsc_type)
2617  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2618  distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
2619  CALL petsc_matinitialise(distributed_matrix%PETSC%OVERRIDE_MATRIX,err,error,*999)
2620  ELSE
2621  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2622  ENDIF
2623  CASE DEFAULT
2624  local_error="The distributed matrix library type of "// &
2625  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2626  CALL flagerror(local_error,err,error,*999)
2627  END SELECT
2628  ELSE
2629  CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
2630  ENDIF
2631  ELSE
2632  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2633  ENDIF
2634 
2635  exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF")
2636  RETURN
2637 999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error)
2638  RETURN 1
2639  END SUBROUTINE distributed_matrix_override_set_off
2640 
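 !Editor's note: an editorial usage sketch, not part of the original source. Setting an
 !override redirects subsequent operations on a finished PETSc matrix to a caller
 !supplied PETSc matrix until the override is switched off:
 !  TYPE(petscmattype) :: otherPetscMatrix
 !  CALL DISTRIBUTED_MATRIX_OVERRIDE_SET_ON(matrix,otherPetscMatrix,err,error,*999)
 !  !...operate on the override matrix...
 !  CALL DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF(matrix,err,error,*999)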
2641  !
2642  !================================================================================================================================
2643  !
2644 
2646  SUBROUTINE distributed_matrix_petsc_create_finish(PETSC_MATRIX,ERR,ERROR,*)
2648  !Argument variables
2649  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2650  INTEGER(INTG), INTENT(OUT) :: ERR
2651  TYPE(varying_string), INTENT(OUT) :: ERROR
2652  !Local Variables
2653  INTEGER(INTG) :: DUMMY_ERR,i
2654  !INTEGER(INTG), ALLOCATABLE :: GLOBAL_NUMBERS(:)
2655  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2656  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2657  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
2658 
2659  enters("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error,*999)
2660 
2661  IF(ASSOCIATED(petsc_matrix)) THEN
2662  distributed_matrix=>petsc_matrix%DISTRIBUTED_MATRIX
2663  IF(ASSOCIATED(distributed_matrix)) THEN
2664  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2665  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2666  IF(ASSOCIATED(row_domain_mapping)) THEN
2667  IF(ASSOCIATED(column_domain_mapping)) THEN
2668  SELECT CASE(petsc_matrix%STORAGE_TYPE)
2669  CASE(distributed_matrix_block_storage_type)
2670  petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M*petsc_matrix%GLOBAL_N
2671  petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%GLOBAL_N
2672  petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
2673  !Set up the Local to Global mappings
2674  ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
2675  IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
2676  DO i=1,petsc_matrix%M
2677  petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1 !PETSc uses 0 based indexing
2678  ENDDO !i
2679  !Set up the matrix
2680  ALLOCATE(petsc_matrix%DATA_DP(petsc_matrix%DATA_SIZE),stat=err)
2681  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix data.",err,error,*999)
2682  CALL petsc_matcreatedense(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
2683  & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_matrix%DATA_DP,petsc_matrix%MATRIX,err,error,*999)
2684  CASE(distributed_matrix_diagonal_storage_type)
2685  petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M
2686  petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=1
2687  petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
2688  !Set up the Local to Global mappings
2689  ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
2690  IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
2691  DO i=1,petsc_matrix%M
2692  petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1 !PETSc uses 0 based indexing
2693  ENDDO !i
2694  !Set up the matrix
2695  ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%N),stat=err)
2696  IF(err/=0) CALL flagerror("Could not allocate diagonal number of non zeros.",err,error,*999)
2697  ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%N),stat=err)
2698  IF(err/=0) CALL flagerror("Could not allocate off diagonal number of non zeros.",err,error,*999)
2699  petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=1
2700  petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
2701  !Create the PETSc AIJ matrix
2702  CALL petsc_matcreateaij(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
2703  & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
2704  & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
2705  CASE(distributed_matrix_column_major_storage_type)
2706  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
2707  CASE(distributed_matrix_row_major_storage_type)
2708  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
2709  CASE(distributed_matrix_compressed_row_storage_type)
2710  IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) THEN
2711  IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) THEN
2712  !Create the PETSc AIJ matrix
2713  CALL petsc_matcreateaij(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
2714  & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
2715  & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
2716  !Set matrix options
2717  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_location_err,.true.,err,error,*999)
2718  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_allocation_err,.true.,err,error,*999)
2719  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_unused_nonzero_location_err,.true.,err,error,*999)
2720  !Set up the Local to Global mappings
2721  ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
2722  IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
2723  petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=0
2724  DO i=1,petsc_matrix%M
2725  petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1 !PETSc uses 0 based indexing
2726  IF((petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i))>petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW) &
2727  & petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i)
2728  ENDDO !i
2729  ELSE
2730  CALL flagerror("Matrix off diagonal storage locations have not been set.",err,error,*999)
2731  ENDIF
2732  ELSE
2733  CALL flagerror("Matrix diagonal storage locations have not been set.",err,error,*999)
2734  ENDIF
2735  CASE(distributed_matrix_compressed_column_storage_type)
2736  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
2737  CASE(distributed_matrix_row_column_storage_type)
2738  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
2739  CASE DEFAULT
2740  local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
2741  & " is invalid."
2742  CALL flagerror(local_error,err,error,*999)
2743  END SELECT
2744  ELSE
2745  CALL flagerror("PETSc matrix distributed matrix column domain mapping is not associated.",err,error,*999)
2746  ENDIF
2747  ELSE
2748  CALL flagerror("PETSc matrix distributed matrix row domain mapping is not associated.",err,error,*999)
2749  ENDIF
2750  ENDIF
2751  ELSE
2752  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*998)
2753  ENDIF
2754 
2755  exits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH")
2756  RETURN
2757 999 CALL distributed_matrix_petsc_finalise(petsc_matrix,dummy_err,dummy_error,*998)
2758 998 errorsexits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error)
2759  RETURN 1
2760  END SUBROUTINE distributed_matrix_petsc_create_finish
2761 
2762  !
2763  !================================================================================================================================
2764  !
2765 
2767  SUBROUTINE distributed_matrix_petsc_finalise(PETSC_MATRIX,ERR,ERROR,*)
2769  !Argument variables
2770  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2771  INTEGER(INTG), INTENT(OUT) :: ERR
2772  TYPE(varying_string), INTENT(OUT) :: ERROR
2773  !Local Variables
2774 
2775  enters("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error,*999)
2776 
2777  IF(ASSOCIATED(petsc_matrix)) THEN
2778  IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
2779  IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
2780  IF(ALLOCATED(petsc_matrix%ROW_INDICES)) DEALLOCATE(petsc_matrix%ROW_INDICES)
2781  IF(ALLOCATED(petsc_matrix%COLUMN_INDICES)) DEALLOCATE(petsc_matrix%COLUMN_INDICES)
2782  IF(ALLOCATED(petsc_matrix%GLOBAL_ROW_NUMBERS)) DEALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS)
2783  !IF(ALLOCATED(PETSC_MATRIX%DATA_DP)) DEALLOCATE(PETSC_MATRIX%DATA_DP)
2784  CALL petsc_matfinalise(petsc_matrix%MATRIX,err,error,*999)
2785  CALL petsc_matfinalise(petsc_matrix%OVERRIDE_MATRIX,err,error,*999)
2786  DEALLOCATE(petsc_matrix)
2787  ENDIF
2788 
2789  exits("DISTRIBUTED_MATRIX_PETSC_FINALISE")
2790  RETURN
2791 999 errorsexits("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error)
2792  RETURN 1
2793  END SUBROUTINE distributed_matrix_petsc_finalise
2794 
2795  !
2796  !================================================================================================================================
2797  !
2798 
2800  SUBROUTINE distributed_matrix_petsc_initialise(DISTRIBUTED_MATRIX,ERR,ERROR,*)
2802  !Argument variables
2803  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2804  INTEGER(INTG), INTENT(OUT) :: ERR
2805  TYPE(varying_string), INTENT(OUT) :: ERROR
2806  !Local Variables
2807  INTEGER(INTG) :: DUMMY_ERR
2808  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2809  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
2810 
2811  enters("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error,*998)
2812 
2813  IF(ASSOCIATED(distributed_matrix)) THEN
2814  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2815  CALL flagerror("PETSc is already associated for this distributed matrix.",err,error,*998)
2816  ELSE
2817  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2818  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2819  IF(ASSOCIATED(row_domain_mapping)) THEN
2820  IF(ASSOCIATED(column_domain_mapping)) THEN
2821  ALLOCATE(distributed_matrix%PETSC,stat=err)
2822  IF(err/=0) CALL flagerror("Could not allocate PETSc distributed matrix.",err,error,*999)
2823  distributed_matrix%PETSC%DISTRIBUTED_MATRIX=>distributed_matrix
2824  distributed_matrix%LIBRARY_TYPE=distributed_matrix_vector_petsc_type
2825  !Set the defaults
2826  SELECT CASE(distributed_matrix%GHOSTING_TYPE)
2827  CASE(distributed_matrix_vector_include_ghosts_type)
2828  distributed_matrix%PETSC%M=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL
2829  CASE(distributed_matrix_vector_no_ghosts_type)
2830  distributed_matrix%PETSC%M=row_domain_mapping%NUMBER_OF_LOCAL
2831  CASE DEFAULT
2832  local_error="The distributed matrix ghosting type of "// &
2833  & trim(numbertovstring(distributed_matrix%GHOSTING_TYPE,"*",err,error))//" is invalid."
2834  CALL flagerror(local_error,err,error,*999)
2835  END SELECT
2836  distributed_matrix%PETSC%N=column_domain_mapping%TOTAL_NUMBER_OF_LOCAL
2837  distributed_matrix%PETSC%GLOBAL_M=row_domain_mapping%NUMBER_OF_GLOBAL
2838  distributed_matrix%PETSC%GLOBAL_N=column_domain_mapping%NUMBER_OF_GLOBAL
2839  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_compressed_row_storage_type
2840  distributed_matrix%PETSC%DATA_SIZE=0
2841  distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW=0
2842  distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
2843  CALL petsc_matinitialise(distributed_matrix%PETSC%MATRIX,err,error,*999)
2844  CALL petsc_matinitialise(distributed_matrix%PETSC%OVERRIDE_MATRIX,err,error,*999)
2845  ELSE
2846  CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
2847  ENDIF
2848  ELSE
2849  CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
2850  ENDIF
2851  ENDIF
2852  ELSE
2853  CALL flagerror("Distributed matrix is not associated.",err,error,*998)
2854  ENDIF
2855 
2856  exits("DISTRIBUTED_MATRIX_PETSC_INITIALISE")
2857  RETURN
2858 999 IF(ASSOCIATED(distributed_matrix%PETSC)) &
2859  & CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,dummy_err,dummy_error,*998)
2860 998 errorsexits("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error)
2861  RETURN 1
2862  END SUBROUTINE distributed_matrix_petsc_initialise
2863 
2864  !
2865  !================================================================================================================================
2866  !
2867 
2869  SUBROUTINE distributed_matrix_storage_locations_get(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,ERR,ERROR,*)
2871  !Argument variables
2872  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2873  INTEGER(INTG), POINTER :: ROW_INDICES(:)
2874  INTEGER(INTG), POINTER :: COLUMN_INDICES(:)
2875  INTEGER(INTG), INTENT(OUT) :: ERR
2876  TYPE(varying_string), INTENT(OUT) :: ERROR
2877  !Local Variables
2878  TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX
2879  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2880  TYPE(varying_string) :: LOCAL_ERROR
2881 
2882  enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error,*999)
2883 
2884  IF(ASSOCIATED(distributed_matrix)) THEN
2885  IF(distributed_matrix%MATRIX_FINISHED) THEN
2886  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2887  CASE(distributed_matrix_vector_cmiss_type)
2888  cmiss_matrix=>distributed_matrix%CMISS
2889  IF(ASSOCIATED(cmiss_matrix)) THEN
2890  CALL matrix_storage_locations_get(cmiss_matrix%MATRIX,row_indices,column_indices,err,error,*999)
2891  ELSE
2892  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2893  ENDIF
2894  CASE(distributed_matrix_vector_petsc_type)
2895  petsc_matrix=>distributed_matrix%PETSC
2896  IF(ASSOCIATED(petsc_matrix)) THEN
2897  SELECT CASE(petsc_matrix%STORAGE_TYPE)
2898  CASE(distributed_matrix_block_storage_type)
2899  CALL flagerror("Cannot get matrix locations for a block storage matrix.",err,error,*999)
2900  CASE(distributed_matrix_diagonal_storage_type)
2901  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
2902  CASE(distributed_matrix_column_major_storage_type)
2903  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
2904  CASE(distributed_matrix_row_major_storage_type)
2905  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
2906  CASE(distributed_matrix_compressed_row_storage_type)
2907  row_indices=>distributed_matrix%PETSC%ROW_INDICES
2908  column_indices=>distributed_matrix%PETSC%COLUMN_INDICES
2909  CASE(distributed_matrix_compressed_column_storage_type)
2910  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
2911  CASE(distributed_matrix_row_column_storage_type)
2912  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
2913  CASE DEFAULT
2914  local_error="The matrix storage type of "// &
2915  & trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))//" is invalid."
2916  CALL flagerror(local_error,err,error,*999)
2917  END SELECT
2918  ELSE
2919  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2920  ENDIF
2921  CASE DEFAULT
2922  local_error="The distributed matrix library type of "// &
2923  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2924  CALL flagerror(local_error,err,error,*999)
2925  END SELECT
2926  ELSE
2927  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
2928  ENDIF
2929  ELSE
2930  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2931  ENDIF
2932 
2933  exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET")
2934  RETURN
2935 999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error)
2936  RETURN 1
2937  END SUBROUTINE distributed_matrix_storage_locations_get
2938 
2939  !
2940  !================================================================================================================================
2941  !
2942 
2944  SUBROUTINE distributed_matrix_storage_locations_set(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,ERR,ERROR,*)
2946  !Argument variables
2947  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2948  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
2949  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
2950  INTEGER(INTG), INTENT(OUT) :: ERR
2951  TYPE(varying_string), INTENT(OUT) :: ERROR
2952  !Local Variables
2953  INTEGER(INTG) :: i,j,k,global_row_start,global_row_finish
2954  TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX
2955  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2956  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2957  TYPE(varying_string) :: LOCAL_ERROR
2958 
2959  NULLIFY(cmiss_matrix)
2960  NULLIFY(petsc_matrix)
2961 
2962  enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error,*999)
2963 
2964  IF(ASSOCIATED(distributed_matrix)) THEN
2965  IF(distributed_matrix%MATRIX_FINISHED) THEN
2966  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
2967  ELSE
2968  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2969  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2970  IF(ASSOCIATED(row_domain_mapping)) THEN
2971  IF(ASSOCIATED(column_domain_mapping)) THEN
2972  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2973  CASE(distributed_matrix_vector_cmiss_type)
2974  cmiss_matrix=>distributed_matrix%CMISS
2975  IF(ASSOCIATED(cmiss_matrix)) THEN
2976  CALL matrix_storage_locations_set(cmiss_matrix%MATRIX,row_indices,column_indices,err,error,*999)
2977  ELSE
2978  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2979  ENDIF
2980  CASE(distributed_matrix_vector_petsc_type)
2981  petsc_matrix=>distributed_matrix%PETSC
2982  IF(ASSOCIATED(petsc_matrix)) THEN
2983  SELECT CASE(petsc_matrix%STORAGE_TYPE)
2984  CASE(distributed_matrix_block_storage_type)
2985  !Do nothing
2986  CASE(distributed_matrix_diagonal_storage_type)
2987  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
2988  CASE(distributed_matrix_column_major_storage_type)
2989  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
2990  CASE(distributed_matrix_row_major_storage_type)
2991  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
2992  CASE(distributed_matrix_compressed_row_storage_type)
2993  IF(SIZE(row_indices,1)==petsc_matrix%M+1) THEN
2994  IF(SIZE(column_indices,1)==petsc_matrix%NUMBER_NON_ZEROS) THEN
2995  IF(row_indices(1)==1) THEN
2996  !Check the row indices are correct
2997  IF(row_indices(petsc_matrix%M+1)==petsc_matrix%NUMBER_NON_ZEROS+1) THEN
2998  DO i=2,petsc_matrix%M+1
2999  IF(row_indices(i)<row_indices(i-1)) THEN
3000  local_error="Invalid row indices. Row "//trim(numbertovstring(i,"*",err,error))// &
3001  & " index number ("//trim(numbertovstring(row_indices(i),"*",err,error))// &
3002  & ") is less than row "//trim(numbertovstring(i-1,"*",err,error))//" index number ("// &
3003  & trim(numbertovstring(row_indices(i-1),"*",err,error))//")."
3004  CALL flagerror(local_error,err,error,*999)
3005  ENDIF
3006  ENDDO !i
3007  !Allocate the PETSc sparsity storage arrays
3008  ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
3009  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix diagonal number of non zeros.",err,error,*999)
3010  petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=0
3011  ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
3012  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix off diagonal number of non zeros.", &
3013  & err,error,*999)
3014  petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
3015  ALLOCATE(petsc_matrix%ROW_INDICES(petsc_matrix%M+1),stat=err)
3016  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix row indices.",err,error,*999)
3017  petsc_matrix%ROW_INDICES(1:petsc_matrix%M+1)=row_indices(1:petsc_matrix%M+1)
3018  ALLOCATE(petsc_matrix%COLUMN_INDICES(petsc_matrix%NUMBER_NON_ZEROS),stat=err)
3019  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix column indices.",err,error,*999)
3020  petsc_matrix%COLUMN_INDICES(1:petsc_matrix%NUMBER_NON_ZEROS)= &
3021  & column_indices(1:petsc_matrix%NUMBER_NON_ZEROS)
3022  !Check the column indices are correct and calculate number of diagonal and off-diagonal columns
3023  global_row_start=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(1)
3024  global_row_finish=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(petsc_matrix%M)
3025  DO i=1,petsc_matrix%M
3026  DO j=row_indices(i),row_indices(i+1)-1
3027  k=column_indices(j)
3028  IF(k>0) THEN
3029  IF(k>petsc_matrix%GLOBAL_N) THEN
3030  local_error="Invalid column indices. Column index "//trim(numbertovstring(j,"*",err,error))// &
3031  & " ("//trim(numbertovstring(k,"*",err,error))// &
3032  & ") is greater than the number of columns ("// &
3033  & trim(numbertovstring(petsc_matrix%GLOBAL_N,"*",err,error))//")."
3034  CALL flagerror(local_error,err,error,*999)
3035  ENDIF
3036  IF(k>=global_row_start.AND.k<=global_row_finish) THEN
3037  petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)+1
3038  ELSE
3039  petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)+1
3040  ENDIF
3041  ELSE
3042  local_error="Invalid column indices. Column index "//trim(numbertovstring(j,"*",err,error))// &
3043  & " ("//trim(numbertovstring(k,"*",err,error))//") is less than zero."
3044  CALL flagerror(local_error,err,error,*999)
3045  ENDIF
3046  ENDDO !j
3047  !Enforce a place for the diagonal entry.
3048  IF(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)==0) petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=1
3049  ENDDO !i
3050  IF(diagnostics3) THEN
3051  CALL write_string(diagnostic_output_type,"PETSc distributed matrix sparsity:",err,error,*999)
3052  CALL write_string_value(diagnostic_output_type," Storage type = ",petsc_matrix%STORAGE_TYPE, &
3053  & err,error,*999)
3054  CALL write_string_value(diagnostic_output_type," M = ",petsc_matrix%M,err,error,*999)
3055  CALL write_string_value(diagnostic_output_type," N = ",petsc_matrix%N,err,error,*999)
3056  CALL write_string_value(diagnostic_output_type," Global M = ",petsc_matrix%GLOBAL_M,err,error,*999)
3057  CALL write_string_value(diagnostic_output_type," Global N = ",petsc_matrix%GLOBAL_N,err,error,*999)
3058  CALL write_string_value(diagnostic_output_type," Number of non zeros = ",petsc_matrix% &
3059  & number_non_zeros,err,error,*999)
3060  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%M,8,8,petsc_matrix%&
3061  & diagonal_number_non_zeros,'(" Diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
3062  & err,error,*999)
3063  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%M,8,8,petsc_matrix%&
3064  & offdiagonal_number_non_zeros,'(" Off-diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
3065  & err,error,*999)
3066  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%M+1,8,8,petsc_matrix%&
3067  & row_indices,'(" Row indices :",8(X,I10))','(33X,8(X,I10))', &
3068  & err,error,*999)
3069  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%NUMBER_NON_ZEROS,8,8,petsc_matrix%&
3070  & column_indices,'(" Column indices :",8(X,I10))','(33X,8(X,I10))', &
3071  & err,error,*999)
3072  ENDIF
3073  ELSE
3074  local_error="Invalid row indices. The last row index ("// &
3075  & trim(numbertovstring(row_indices(petsc_matrix%M+1),"*",err,error))// &
3076  & ") does not equal the number of non-zeros + 1 ("// &
3077  & trim(numbertovstring(petsc_matrix%NUMBER_NON_ZEROS+1,"*",err,error))//")."
3078  CALL flagerror(local_error,err,error,*999)
3079  ENDIF
3080  ELSE
3081  local_error="Invalid row indices. The first row index ("// &
3082  & trim(numbertovstring(row_indices(1),"*",err,error))//") does not equal 1."
3083  CALL flagerror(local_error,err,error,*999)
3084  ENDIF
3085  ELSE
3086  local_error="The supplied number of column indices ("// &
3087  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
3088  & ") does not match the number of non-zeros in the matrix ("// &
3089  & trim(numbertovstring(petsc_matrix%NUMBER_NON_ZEROS,"*",err,error))//")."
3090  CALL flagerror(local_error,err,error,*999)
3091  ENDIF
3092  ELSE
3093  local_error="The supplied number of row indices ("// &
3094  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
3095  & ") does not match the number of rows in the matrix + 1 ("// &
3096  & trim(numbertovstring(petsc_matrix%M+1,"*",err,error))//")."
3097  CALL flagerror(local_error,err,error,*999)
3098  ENDIF
3099  CASE(distributed_matrix_compressed_column_storage_type)
3100  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
3101  CASE(distributed_matrix_row_column_storage_type)
3102  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
3103  CASE DEFAULT
3104  local_error="The specified matrix storage type of "// &
3105  & trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))//" is invalid."
3106  CALL flagerror(local_error,err,error,*999)
3107  END SELECT
3108  ELSE
3109  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3110  ENDIF
3111  CASE DEFAULT
3112  local_error="The distributed matrix library type of "// &
3113  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3114  CALL flagerror(local_error,err,error,*999)
3115  END SELECT
3116  ELSE
3117  CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
3118  ENDIF
3119  ELSE
3120  CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
3121  ENDIF
3122  ENDIF
3123  ELSE
3124  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3125  ENDIF
3126 
3127  exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET")
3128  RETURN
3129 999 IF(ASSOCIATED(petsc_matrix)) THEN
3130  IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
3131  IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
3132  ENDIF
3133  errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error)
3134  RETURN 1
3135  END SUBROUTINE distributed_matrix_storage_locations_set
3136 
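A minimal sketch of the compressed-row arrays the routine above validates, for a hypothetical 2x3 local matrix with non-zeros at (1,1), (1,3) and (2,2); the example_* names are illustrative, not part of the module. Indices are 1-based on entry (the PETSc branch converts columns to 0-based internally), row i occupies entries row_indices(i)..row_indices(i+1)-1, so row_indices(1)=1 and row_indices(M+1)=NUMBER_NON_ZEROS+1:

  INTEGER(INTG) :: example_row_indices(3),example_column_indices(3)
  example_row_indices=[1,3,4]    !rows 1 and 2 hold 2 and 1 non-zeros respectively
  example_column_indices=[1,3,2] !column of each stored entry, row by row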
3137  !
3138  !================================================================================================================================
3139  !
3140 
3142  SUBROUTINE distributed_matrix_storage_type_get(DISTRIBUTED_MATRIX,STORAGE_TYPE,ERR,ERROR,*)
3144  !Argument variables
3145  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3146  INTEGER(INTG), INTENT(OUT) :: STORAGE_TYPE
3147  INTEGER(INTG), INTENT(OUT) :: ERR
3148  TYPE(varying_string), INTENT(OUT) :: ERROR
3149  !Local Variables
3150  TYPE(varying_string) :: LOCAL_ERROR
3151 
3152  enters("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error,*999)
3153 
3154  IF(ASSOCIATED(distributed_matrix)) THEN
3155  IF(distributed_matrix%MATRIX_FINISHED) THEN
3156  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3157  CASE(distributed_matrix_vector_cmiss_type)
3158  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3159  CALL matrix_storage_type_get(distributed_matrix%CMISS%MATRIX,storage_type,err,error,*999)
3160  ELSE
3161  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3162  ENDIF
3163  CASE(distributed_matrix_vector_petsc_type)
3164  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3165  storage_type=distributed_matrix%PETSC%STORAGE_TYPE
3166  ELSE
3167  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3168  ENDIF
3169  CASE DEFAULT
3170  local_error="The distributed matrix library type of "// &
3171  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3172  CALL flagerror(local_error,err,error,*999)
3173  END SELECT
3174  ELSE
3175  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3176  ENDIF
3177  ELSE
3178  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3179  ENDIF
3180 
3181  exits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET")
3182  RETURN
3183 999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error)
3184  RETURN 1
3185  END SUBROUTINE distributed_matrix_storage_type_get
3186 
3187  !
3188  !================================================================================================================================
3189  !
3190 
3192  SUBROUTINE distributed_matrix_storage_type_set(DISTRIBUTED_MATRIX,STORAGE_TYPE,ERR,ERROR,*)
3194  !Argument variables
3195  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3196  INTEGER(INTG), INTENT(IN) :: STORAGE_TYPE
3197  INTEGER(INTG), INTENT(OUT) :: ERR
3198  TYPE(varying_string), INTENT(OUT) :: ERROR
3199  !Local Variables
3200  TYPE(varying_string) :: LOCAL_ERROR
3201 
3202  enters("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error,*999)
3203 
3204  IF(ASSOCIATED(distributed_matrix)) THEN
3205  IF(distributed_matrix%MATRIX_FINISHED) THEN
3206  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
3207  ELSE
3208  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3209  CASE(distributed_matrix_vector_cmiss_type)
3210  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3211  CALL matrix_storage_type_set(distributed_matrix%CMISS%MATRIX,storage_type,err,error,*999)
3212  ELSE
3213  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3214  ENDIF
3215  CASE(distributed_matrix_vector_petsc_type)
3216  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3217  SELECT CASE(storage_type)
3218  CASE(distributed_matrix_block_storage_type)
3219  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_block_storage_type
3220  CASE(distributed_matrix_diagonal_storage_type)
3221  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_diagonal_storage_type
3222  CASE(distributed_matrix_column_major_storage_type)
3223  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
3224  CASE(distributed_matrix_row_major_storage_type)
3225  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
3226  CASE(distributed_matrix_compressed_row_storage_type)
3227  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_compressed_row_storage_type
3228  CASE(distributed_matrix_compressed_column_storage_type)
3229  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
3230  CASE(distributed_matrix_row_column_storage_type)
3231  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
3232  CASE DEFAULT
3233  local_error="The specified matrix storage type of "//trim(numbertovstring(storage_type,"*",err,error))// &
3234  & " is invalid."
3235  CALL flagerror(local_error,err,error,*999)
3236  END SELECT
3237  ELSE
3238  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3239  ENDIF
3240  CASE DEFAULT
3241  local_error="The distributed matrix library type of "// &
3242  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3243  CALL flagerror(local_error,err,error,*999)
3244  END SELECT
3245  ENDIF
3246  ELSE
3247  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3248  ENDIF
3249 
3250  exits("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET")
3251  RETURN
3252 999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error)
3253  RETURN 1
3254  END SUBROUTINE distributed_matrix_storage_type_set
3255 
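A sketch of where the setter above sits in the usual create sequence, assuming a caller holding an unfinished matrix pointer my_matrix, CRS arrays as in the sketch further up, and this module's err/error/*999 convention (my_matrix and the 999 label are illustrative):

  CALL distributed_matrix_storage_type_set(my_matrix,distributed_matrix_compressed_row_storage_type,err,error,*999)
  CALL distributed_matrix_storage_locations_set(my_matrix,row_indices,column_indices,err,error,*999)
  CALL distributed_matrix_create_finish(my_matrix,err,error,*999)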
3256  !
3257  !================================================================================================================================
3258  !
3259 
3261  SUBROUTINE distributed_matrix_update_finish(DISTRIBUTED_MATRIX,ERR,ERROR,*)
3263  !Argument variables
3264  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3265  INTEGER(INTG), INTENT(OUT) :: ERR
3266  TYPE(varying_string), INTENT(OUT) :: ERROR
3267  !Local Variables
3268  TYPE(varying_string) :: LOCAL_ERROR
3269 
3270  enters("DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error,*999)
3271 
3272  IF(ASSOCIATED(distributed_matrix)) THEN
3273  IF(distributed_matrix%MATRIX_FINISHED) THEN
3274  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3275  CASE(distributed_matrix_vector_cmiss_type)
3276  !Do nothing for now.
3277  CASE(distributed_matrix_vector_petsc_type)
3278  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3279  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3280  CALL petsc_matassemblyend(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
3281  ELSE
3282  CALL petsc_matassemblyend(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
3283  ENDIF
3284  ELSE
3285  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3286  ENDIF
3287  CASE DEFAULT
3288  local_error="The distributed matrix library type of "// &
3289  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3290  CALL flagerror(local_error,err,error,*999)
3291  END SELECT
3292  ELSE
3293  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3294  ENDIF
3295  ELSE
3296  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3297  ENDIF
3298 
3299  exits("DISTRIBUTED_MATRIX_UPDATE_FINISH")
3300  RETURN
3301 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error)
3302  RETURN 1
3303  END SUBROUTINE distributed_matrix_update_finish
3304 
3305  !
3306  !================================================================================================================================
3307  !
3308 
3310  SUBROUTINE distributed_matrix_update_isfinished(DISTRIBUTED_MATRIX,ISFINISHED,ERR,ERROR,*)
3312  !Argument variables
3313  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3314  LOGICAL, INTENT(OUT) :: ISFINISHED
3315  INTEGER(INTG), INTENT(OUT) :: ERR
3316  TYPE(varying_string), INTENT(OUT) :: ERROR
3317  !Local Variables
3318 
3319  enters("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error,*999)
3320 
3321  isfinished=.false.
3322  IF(ASSOCIATED(distributed_matrix)) THEN
3323  IF(distributed_matrix%MATRIX_FINISHED) THEN
3324  !Do nothing for now.
3325  isfinished=.true.
3326  ELSE
3327  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3328  ENDIF
3329  ELSE
3330  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3331  ENDIF
3332 
3333  exits("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED")
3334  RETURN
3335 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error)
3336  RETURN 1
3337  END SUBROUTINE distributed_matrix_update_isfinished
3338 
3339  !
3340  !================================================================================================================================
3341  !
3342 
3344  SUBROUTINE distributed_matrix_update_waitfinished(DISTRIBUTED_MATRIX,ERR,ERROR,*)
3346  !Argument variables
3347  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3348  INTEGER(INTG), INTENT(OUT) :: ERR
3349  TYPE(varying_string), INTENT(OUT) :: ERROR
3350  !Local Variables
3351 
3352  enters("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error,*999)
3353 
3354  IF(ASSOCIATED(distributed_matrix)) THEN
3355  IF(distributed_matrix%MATRIX_FINISHED) THEN
3356  !Do nothing for now.
3357  ELSE
3358  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3359  ENDIF
3360  ELSE
3361  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3362  ENDIF
3363 
3364  exits("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED")
3365  RETURN
3366 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error)
3367  RETURN 1
3368  END SUBROUTINE distributed_matrix_update_waitfinished
3369 
3370  !
3371  !================================================================================================================================
3372  !
3373 
3375  SUBROUTINE distributed_matrix_update_start(DISTRIBUTED_MATRIX,ERR,ERROR,*)
3377  !Argument variables
3378  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3379  INTEGER(INTG), INTENT(OUT) :: ERR
3380  TYPE(varying_string), INTENT(OUT) :: ERROR
3381  !Local Variables
3382  TYPE(varying_string) :: LOCAL_ERROR
3383 
3384  enters("DISTRIBUTED_MATRIX_UPDATE_START",err,error,*999)
3385 
3386  IF(ASSOCIATED(distributed_matrix)) THEN
3387  IF(distributed_matrix%MATRIX_FINISHED) THEN
3388  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3389  CASE(distributed_matrix_vector_cmiss_type)
3390  !Do nothing for now.
3391  CASE(distributed_matrix_vector_petsc_type)
3392  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3393  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3394  CALL petsc_matassemblybegin(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
3395  ELSE
3396  CALL petsc_matassemblybegin(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
3397  ENDIF
3398  ELSE
3399  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3400  ENDIF
3401  CASE DEFAULT
3402  local_error="The distributed matrix library type of "// &
3403  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3404  CALL flagerror(local_error,err,error,*999)
3405  END SELECT
3406  ELSE
3407  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3408  ENDIF
3409  ELSE
3410  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3411  ENDIF
3412 
3413  exits("DISTRIBUTED_MATRIX_UPDATE_START")
3414  RETURN
3415 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_START",err,error)
3416  RETURN 1
3417  END SUBROUTINE distributed_matrix_update_start
3418 
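The two halves above mirror PETSc's split MatAssemblyBegin/MatAssemblyEnd, so a caller may overlap unrelated work between them; a minimal sketch, with my_matrix illustrative:

  CALL distributed_matrix_update_start(my_matrix,err,error,*999)  !begin assembly (MatAssemblyBegin for PETSc)
  !...local work that does not touch my_matrix can be overlapped here...
  CALL distributed_matrix_update_finish(my_matrix,err,error,*999) !end assembly (MatAssemblyEnd for PETSc)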
3419  !
3420  !================================================================================================================================
3421  !
3422 
3424  SUBROUTINE distributed_matrix_values_add_intg(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3426  !Argument variables
3427  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3428  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3429  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3430  INTEGER(INTG), INTENT(IN) :: VALUES(:)
3431  INTEGER(INTG), INTENT(OUT) :: ERR
3432  TYPE(varying_string), INTENT(OUT) :: ERROR
3433  !Local Variables
3434  TYPE(varying_string) :: LOCAL_ERROR
3435 
3436  enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error,*999)
3437 
3438  IF(ASSOCIATED(distributed_matrix)) THEN
3439  IF(distributed_matrix%MATRIX_FINISHED) THEN
3440  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3441  CASE(distributed_matrix_vector_cmiss_type)
3442  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3443  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3444  ELSE
3445  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3446  ENDIF
3447  CASE(distributed_matrix_vector_petsc_type)
3448  CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3449  CASE DEFAULT
3450  local_error="The distributed matrix library type of "// &
3451  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3452  CALL flagerror(local_error,err,error,*999)
3453  END SELECT
3454  ELSE
3455  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3456  ENDIF
3457  ELSE
3458  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3459  ENDIF
3460 
3461  exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG")
3462  RETURN
3463 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error)
3464  RETURN 1
3465  END SUBROUTINE distributed_matrix_values_add_intg
3466 
3467  !
3468  !================================================================================================================================
3469  !
3470 
3472  SUBROUTINE distributed_matrix_values_add_intg1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3474  !Argument variables
3475  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3476  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3477  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3478  INTEGER(INTG), INTENT(IN) :: VALUE
3479  INTEGER(INTG), INTENT(OUT) :: ERR
3480  TYPE(varying_string), INTENT(OUT) :: ERROR
3481  !Local Variables
3482  TYPE(varying_string) :: LOCAL_ERROR
3483 
3484  enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error,*999)
3485 
3486  IF(ASSOCIATED(distributed_matrix)) THEN
3487  IF(distributed_matrix%MATRIX_FINISHED) THEN
3488  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3489  CASE(distributed_matrix_vector_cmiss_type)
3490  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3491  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
3492  ELSE
3493  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3494  ENDIF
3495  CASE(distributed_matrix_vector_petsc_type)
3496  CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3497  CASE DEFAULT
3498  local_error="The distributed matrix library type of "// &
3499  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3500  CALL flagerror(local_error,err,error,*999)
3501  END SELECT
3502  ELSE
3503  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3504  ENDIF
3505  ELSE
3506  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3507  ENDIF
3508 
3509  exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1")
3510  RETURN
3511 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error)
3512  RETURN 1
3513  END SUBROUTINE distributed_matrix_values_add_intg1
3514 
3515  !
3516  !================================================================================================================================
3517  !
3518 
3520  SUBROUTINE distributed_matrix_values_add_intg2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3522  !Argument variables
3523  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3524  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3525  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3526  INTEGER(INTG), INTENT(IN) :: VALUES(:,:)
3527  INTEGER(INTG), INTENT(OUT) :: ERR
3528  TYPE(varying_string), INTENT(OUT) :: ERROR
3529  !Local Variables
3530  TYPE(varying_string) :: LOCAL_ERROR
3531 
3532  enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error,*999)
3533 
3534  IF(ASSOCIATED(distributed_matrix)) THEN
3535  IF(distributed_matrix%MATRIX_FINISHED) THEN
3536  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3537  CASE(distributed_matrix_vector_cmiss_type)
3538  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3539  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3540  ELSE
3541  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3542  ENDIF
3543  CASE(distributed_matrix_vector_petsc_type)
3544  CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3545  CASE DEFAULT
3546  local_error="The distributed matrix library type of "// &
3547  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3548  CALL flagerror(local_error,err,error,*999)
3549  END SELECT
3550  ELSE
3551  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3552  ENDIF
3553  ELSE
3554  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3555  ENDIF
3556 
3557  exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2")
3558  RETURN
3559 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error)
3560  RETURN 1
3561  END SUBROUTINE distributed_matrix_values_add_intg2
3562 
3563  !
3564  !================================================================================================================================
3565  !
3566 
3568  SUBROUTINE distributed_matrix_values_add_sp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3570  !Argument variables
3571  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3572  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3573  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3574  REAL(SP), INTENT(IN) :: VALUES(:)
3575  INTEGER(INTG), INTENT(OUT) :: ERR
3576  TYPE(varying_string), INTENT(OUT) :: ERROR
3577  !Local Variables
3578  TYPE(varying_string) :: LOCAL_ERROR
3579 
3580  enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error,*999)
3581 
3582  IF(ASSOCIATED(distributed_matrix)) THEN
3583  IF(distributed_matrix%MATRIX_FINISHED) THEN
3584  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3585  CASE(distributed_matrix_vector_cmiss_type)
3586  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3587  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3588  ELSE
3589  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3590  ENDIF
3591  CASE(distributed_matrix_vector_petsc_type)
3592  CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3593  CASE DEFAULT
3594  local_error="The distributed matrix library type of "// &
3595  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3596  CALL flagerror(local_error,err,error,*999)
3597  END SELECT
3598  ELSE
3599  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3600  ENDIF
3601  ELSE
3602  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3603  ENDIF
3604 
3605  exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP")
3606  RETURN
3607 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error)
3608  RETURN 1
3609  END SUBROUTINE distributed_matrix_values_add_sp
3610 
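Single precision adds are deliberately CMISS-only: the PETSc matrices in this module are double precision, so single precision, integer and logical data must live in a CMISS-library matrix. A sketch of selecting the library before the matrix is finished (my_matrix illustrative, assuming the usual setter signature in this module):

  CALL distributed_matrix_library_type_set(my_matrix,distributed_matrix_vector_cmiss_type,err,error,*999)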
3611  !
3612  !================================================================================================================================
3613  !
3614 
3616  SUBROUTINE distributed_matrix_values_add_sp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3618  !Argument variables
3619  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3620  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3621  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3622  REAL(SP), INTENT(IN) :: VALUE
3623  INTEGER(INTG), INTENT(OUT) :: ERR
3624  TYPE(varying_string), INTENT(OUT) :: ERROR
3625  !Local Variables
3626  TYPE(varying_string) :: LOCAL_ERROR
3627 
3628  enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error,*999)
3629 
3630  IF(ASSOCIATED(distributed_matrix)) THEN
3631  IF(distributed_matrix%MATRIX_FINISHED) THEN
3632  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3633  CASE(distributed_matrix_vector_cmiss_type)
3634  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3635  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
3636  ELSE
3637  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3638  ENDIF
3639  CASE(distributed_matrix_vector_petsc_type)
3640  CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3641  CASE DEFAULT
3642  local_error="The distributed matrix library type of "// &
3643  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3644  CALL flagerror(local_error,err,error,*999)
3645  END SELECT
3646  ELSE
3647  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3648  ENDIF
3649  ELSE
3650  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3651  ENDIF
3652 
3653  exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP1")
3654  RETURN
3655 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error)
3656  RETURN 1
3657  END SUBROUTINE distributed_matrix_values_add_sp1
3658 
3659  !
3660  !================================================================================================================================
3661  !
3662 
3664  SUBROUTINE distributed_matrix_values_add_sp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3666  !Argument variables
3667  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3668  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3669  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3670  REAL(SP), INTENT(IN) :: VALUES(:,:)
3671  INTEGER(INTG), INTENT(OUT) :: ERR
3672  TYPE(varying_string), INTENT(OUT) :: ERROR
3673  !Local Variables
3674  TYPE(varying_string) :: LOCAL_ERROR
3675 
3676  enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error,*999)
3677 
3678  IF(ASSOCIATED(distributed_matrix)) THEN
3679  IF(distributed_matrix%MATRIX_FINISHED) THEN
3680  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3681  CASE(distributed_matrix_vector_cmiss_type)
3682  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3683  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3684  ELSE
3685  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3686  ENDIF
3687  CASE(distributed_matrix_vector_petsc_type)
3688  CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3689  CASE DEFAULT
3690  local_error="The distributed matrix library type of "// &
3691  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3692  CALL flagerror(local_error,err,error,*999)
3693  END SELECT
3694  ELSE
3695  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3696  ENDIF
3697  ELSE
3698  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3699  ENDIF
3700 
3701  exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP2")
3702  RETURN
3703 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error)
3704  RETURN 1
3705  END SUBROUTINE distributed_matrix_values_add_sp2
3706 
3707  !
3708  !================================================================================================================================
3709  !
3710 
3712  SUBROUTINE distributed_matrix_values_add_dp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3714  !Argument variables
3715  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3716  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3717  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3718  REAL(DP), INTENT(IN) :: VALUES(:)
3719  INTEGER(INTG), INTENT(OUT) :: ERR
3720  TYPE(varying_string), INTENT(OUT) :: ERROR
3721  !Local Variables
3722  INTEGER(INTG) :: i
3723  TYPE(varying_string) :: LOCAL_ERROR
3724 
3725  enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error,*999)
3726 
3727  IF(ASSOCIATED(distributed_matrix)) THEN
3728  IF(distributed_matrix%MATRIX_FINISHED) THEN
3729  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3730  CASE(distributed_matrix_vector_cmiss_type)
3731  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3732  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3733  ELSE
3734  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3735  ENDIF
3736  CASE(distributed_matrix_vector_petsc_type)
3737  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3738  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
3739  IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
3740  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3741  DO i=1,SIZE(row_indices,1)
3742  !Use global matrix row and column numbers
3743  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
3744  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
3745  & err,error,*999) !PETSc uses 0 based indices
3746  ENDDO !i
3747  ELSE
3748  DO i=1,SIZE(row_indices,1)
3749  !Use global matrix row and column numbers
3750  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC% &
3751  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
3752  & err,error,*999) !PETSc uses 0 based indices
3753  ENDDO !i
3754  ENDIF
3755  ELSE
3756  local_error="The size of the column indices array ("// &
3757  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
3758  & ") does not conform to the size of the values array ("// &
3759  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
3760  CALL flagerror(local_error,err,error,*999)
3761  ENDIF
3762  ELSE
3763  local_error="The size of the row indices array ("// &
3764  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
3765  & ") does not conform to the size of the values array ("// &
3766  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
3767  CALL flagerror(local_error,err,error,*999)
3768  ENDIF
3769  ELSE
3770  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
3771  ENDIF
3772  CASE DEFAULT
3773  local_error="The distributed matrix library type of "// &
3774  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3775  CALL flagerror(local_error,err,error,*999)
3776  END SELECT
3777  ELSE
3778  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3779  ENDIF
3780  ELSE
3781  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3782  ENDIF
3783 
3784  exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP")
3785  RETURN
3786 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error)
3787  RETURN 1
3788  END SUBROUTINE distributed_matrix_values_add_dp
3789 
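The 1-D adder above treats row_indices, column_indices and values as equal-length triplets, adding values(i) at position (row_indices(i),column_indices(i)); a minimal sketch with illustrative names:

  !Adds 2.0 at (1,2) and 3.0 at (4,7) of a finished double precision matrix
  CALL distributed_matrix_values_add_dp(my_matrix,[1,4],[2,7],[2.0_DP,3.0_DP],err,error,*999)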
3790  !
3791  !================================================================================================================================
3792  !
3793 
3795  SUBROUTINE distributed_matrix_values_add_dp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3797  !Argument variables
3798  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3799  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3800  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3801  REAL(DP), INTENT(IN) :: VALUE
3802  INTEGER(INTG), INTENT(OUT) :: ERR
3803  TYPE(varying_string), INTENT(OUT) :: ERROR
3804  !Local Variables
3805  INTEGER(INTG) :: PETSC_COL_INDEX(1)
3806  REAL(DP) :: PETSC_VALUE(1)
3807  TYPE(varying_string) :: LOCAL_ERROR
3808 
3809  enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error,*999)
3810 
3811  IF(ASSOCIATED(distributed_matrix)) THEN
3812  IF(distributed_matrix%MATRIX_FINISHED) THEN
3813  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3814  CASE(distributed_matrix_vector_cmiss_type)
3815  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3816  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
3817  ELSE
3818  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3819  ENDIF
3820  CASE(distributed_matrix_vector_petsc_type)
3821  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3822  !Use global matrix row and column numbers
3823  petsc_col_index(1)=column_index-1
3824  petsc_value(1)=VALUE
3825  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3826  CALL petsc_matsetvalue(distributed_matrix%PETSC%OVERRIDE_MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
3827  & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3828  ELSE
3829  CALL petsc_matsetvalue(distributed_matrix%PETSC%MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
3830  & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3831  ENDIF
3832  ELSE
3833  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3834  ENDIF
3835  CASE DEFAULT
3836  local_error="The distributed matrix library type of "// &
3837  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3838  CALL flagerror(local_error,err,error,*999)
3839  END SELECT
3840  ELSE
3841  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3842  ENDIF
3843  ELSE
3844  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3845  ENDIF
3846 
3847  exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP1")
3848  RETURN
3849 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error)
3850  RETURN 1
3851  END SUBROUTINE distributed_matrix_values_add_dp1
3852 
3853  !
3854  !================================================================================================================================
3855  !
3856 
3858  SUBROUTINE distributed_matrix_values_add_dp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3860  !Argument variables
3861  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3862  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3863  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3864  REAL(DP), INTENT(IN) :: VALUES(:,:)
3865  INTEGER(INTG), INTENT(OUT) :: ERR
3866  TYPE(varying_string), INTENT(OUT) :: ERROR
3867  !Local Variables
3868  INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices)),i
3869  TYPE(varying_string) :: LOCAL_ERROR
3870 
3871  enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error,*999)
3872 
3873  IF(ASSOCIATED(distributed_matrix)) THEN
3874  IF(distributed_matrix%MATRIX_FINISHED) THEN
3875  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3876  CASE(distributed_matrix_vector_cmiss_type)
3877  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3878  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3879  ELSE
3880  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3881  ENDIF
3882  CASE(distributed_matrix_vector_petsc_type)
3883  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3884  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
3885  IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
3886  DO i=1,SIZE(row_indices,1)
3887  global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
3888  ENDDO !i
3889  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3890  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
3891  & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3892  ELSE
3893  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
3894  & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3895  ENDIF
3896  ELSE
3897  local_error="The size of the column indices array ("// &
3898  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
3899  & ") does not conform to the number of columns in the values array ("// &
3900  & trim(numbertovstring(SIZE(values,2),"*",err,error))//")."
3901  CALL flagerror(local_error,err,error,*999)
3902  ENDIF
3903  ELSE
3904  local_error="The size of the row indices array ("// &
3905  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
3906  & ") does not conform to the number of rows in the values array ("// &
3907  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
3908  CALL flagerror(local_error,err,error,*999)
3909  ENDIF
3910  ELSE
3911  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
3912  ENDIF
3913  CASE DEFAULT
3914  local_error="The distributed matrix library type of "// &
3915  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3916  CALL flagerror(local_error,err,error,*999)
3917  END SELECT
3918  ELSE
3919  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3920  ENDIF
3921  ELSE
3922  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3923  ENDIF
3924 
3925  exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP2")
3926  RETURN
3927 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error)
3928  RETURN 1
3929  END SUBROUTINE distributed_matrix_values_add_dp2
3930 
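The 2-D variant above is the natural fit for finite element assembly: values(i,j) is added at (row_indices(i),column_indices(j)), so a dense element matrix scatters into the global matrix in one call. A minimal sketch with illustrative names:

  REAL(DP) :: element_matrix(2,2)
  element_matrix=RESHAPE([1.0_DP,-1.0_DP,-1.0_DP,1.0_DP],[2,2])
  CALL distributed_matrix_values_add_dp2(my_matrix,[3,4],[3,4],element_matrix,err,error,*999)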
3931  !
3932  !================================================================================================================================
3933  !
3934 
3936  SUBROUTINE distributed_matrix_values_add_l(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3938  !Argument variables
3939  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3940  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3941  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3942  LOGICAL, INTENT(IN) :: VALUES(:)
3943  INTEGER(INTG), INTENT(OUT) :: ERR
3944  TYPE(varying_string), INTENT(OUT) :: ERROR
3945  !Local Variables
3946  TYPE(varying_string) :: LOCAL_ERROR
3947 
3948  enters("DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error,*999)
3949 
3950  IF(ASSOCIATED(distributed_matrix)) THEN
3951  IF(distributed_matrix%MATRIX_FINISHED) THEN
3952  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3953  CASE(distributed_matrix_vector_cmiss_type)
3954  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3955  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3956  ELSE
3957  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3958  ENDIF
3959  CASE(distributed_matrix_vector_petsc_type)
3960  CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
3961  CASE DEFAULT
3962  local_error="The distributed matrix library type of "// &
3963  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3964  CALL flagerror(local_error,err,error,*999)
3965  END SELECT
3966  ELSE
3967  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3968  ENDIF
3969  ELSE
3970  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3971  ENDIF
3972 
3973  exits("DISTRIBUTED_MATRIX_VALUES_ADD_L")
3974  RETURN
3975 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error)
3976  RETURN 1
3977  END SUBROUTINE distributed_matrix_values_add_l
3978 
3979  !
3980  !================================================================================================================================
3981  !
3982 
3984  SUBROUTINE distributed_matrix_values_add_l1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3986  !Argument variables
3987  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3988  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3989  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3990  LOGICAL, INTENT(IN) :: VALUE
3991  INTEGER(INTG), INTENT(OUT) :: ERR
3992  TYPE(varying_string), INTENT(OUT) :: ERROR
3993  !Local Variables
3994  TYPE(varying_string) :: LOCAL_ERROR
3995 
3996  enters("DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error,*999)
3997 
3998  IF(ASSOCIATED(distributed_matrix)) THEN
3999  IF(distributed_matrix%MATRIX_FINISHED) THEN
4000  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4001  CASE(distributed_matrix_vector_cmiss_type)
4002  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4003  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4004  ELSE
4005  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4006  ENDIF
4007  CASE(distributed_matrix_vector_petsc_type)
4008  CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
4009  CASE DEFAULT
4010  local_error="The distributed matrix library type of "// &
4011  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4012  CALL flagerror(local_error,err,error,*999)
4013  END SELECT
4014  ELSE
4015  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4016  ENDIF
4017  ELSE
4018  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4019  ENDIF
4020 
4021  exits("DISTRIBUTED_MATRIX_VALUES_ADD_L1")
4022  RETURN
4023 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error)
4024  RETURN 1
4025  END SUBROUTINE distributed_matrix_values_add_l1
4026 
4027  !
4028  !================================================================================================================================
4029  !
4030 
4032  SUBROUTINE distributed_matrix_values_add_l2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4034  !Argument variables
4035  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4036  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4037  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4038  LOGICAL, INTENT(IN) :: VALUES(:,:)
4039  INTEGER(INTG), INTENT(OUT) :: ERR
4040  TYPE(varying_string), INTENT(OUT) :: ERROR
4041  !Local Variables
4042  TYPE(varying_string) :: LOCAL_ERROR
4043 
4044  enters("DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error,*999)
4045 
4046  IF(ASSOCIATED(distributed_matrix)) THEN
4047  IF(distributed_matrix%MATRIX_FINISHED) THEN
4048  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4049  CASE(distributed_matrix_vector_cmiss_type)
4050  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4051  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4052  ELSE
4053  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4054  ENDIF
4055  CASE(distributed_matrix_vector_petsc_type)
4056  CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
4057  CASE DEFAULT
4058  local_error="The distributed matrix library type of "// &
4059  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4060  CALL flagerror(local_error,err,error,*999)
4061  END SELECT
4062  ELSE
4063  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4064  ENDIF
4065  ELSE
4066  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4067  ENDIF
4068 
4069  exits("DISTRIBUTED_MATRIX_VALUES_ADD_L2")
4070  RETURN
4071 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error)
4072  RETURN 1
4073  END SUBROUTINE distributed_matrix_values_add_l2
4074 
4075  !
4076  !================================================================================================================================
4077  !
4078 
4080  SUBROUTINE distributed_matrix_values_get_intg(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4082  !Argument variables
4083  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4084  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4085  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4086  INTEGER(INTG), INTENT(OUT) :: VALUES(:)
4087  INTEGER(INTG), INTENT(OUT) :: ERR
4088  TYPE(varying_string), INTENT(OUT) :: ERROR
4089  !Local Variables
4090  TYPE(varying_string) :: LOCAL_ERROR
4091 
4092  enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error,*999)
4093 
4094  IF(ASSOCIATED(distributed_matrix)) THEN
4095  IF(distributed_matrix%MATRIX_FINISHED) THEN
4096  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4097  CASE(distributed_matrix_vector_cmiss_type)
4098  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4099  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4100  ELSE
4101  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4102  ENDIF
4103  CASE(distributed_matrix_vector_petsc_type)
4104  CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4105  CASE DEFAULT
4106  local_error="The distributed matrix library type of "// &
4107  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4108  CALL flagerror(local_error,err,error,*999)
4109  END SELECT
4110  ELSE
4111  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4112  ENDIF
4113  ELSE
4114  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4115  ENDIF
4116 
4117  exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG")
4118  RETURN
4119 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error)
4120  RETURN 1
4121  END SUBROUTINE distributed_matrix_values_get_intg
4122 
4123  !
4124  !================================================================================================================================
4125  !
4126 
4128  SUBROUTINE distributed_matrix_values_get_intg1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4130  !Argument variables
4131  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4132  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4133  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4134  INTEGER(INTG), INTENT(OUT) :: VALUE
4135  INTEGER(INTG), INTENT(OUT) :: ERR
4136  TYPE(varying_string), INTENT(OUT) :: ERROR
4137  !Local Variables
4138  TYPE(varying_string) :: LOCAL_ERROR
4139 
4140  enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error,*999)
4141 
4142  IF(ASSOCIATED(distributed_matrix)) THEN
4143  IF(distributed_matrix%MATRIX_FINISHED) THEN
4144  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4145  CASE(distributed_matrix_vector_cmiss_type)
4146  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4147  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4148  ELSE
4149  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4150  ENDIF
4151  CASE(distributed_matrix_vector_petsc_type)
4152  CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4153  CASE DEFAULT
4154  local_error="The distributed matrix library type of "// &
4155  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4156  CALL flagerror(local_error,err,error,*999)
4157  END SELECT
4158  ELSE
4159  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4160  ENDIF
4161  ELSE
4162  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4163  ENDIF
4164 
4165  exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG1")
4166  RETURN
4167 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error)
4168  RETURN 1
4169  END SUBROUTINE distributed_matrix_values_get_intg1
4170 
4171  !
4172  !================================================================================================================================
4173  !
4174 
4176  SUBROUTINE distributed_matrix_values_get_intg2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4178  !Argument variables
4179  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4180  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4181  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4182  INTEGER(INTG), INTENT(OUT) :: VALUES(:,:)
4183  INTEGER(INTG), INTENT(OUT) :: ERR
4184  TYPE(varying_string), INTENT(OUT) :: ERROR
4185  !Local Variables
4186  TYPE(varying_string) :: LOCAL_ERROR
4187 
4188  enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error,*999)
4189 
4190  IF(ASSOCIATED(distributed_matrix)) THEN
4191  IF(distributed_matrix%MATRIX_FINISHED) THEN
4192  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4193  CASE(distributed_matrix_vector_cmiss_type)
4194  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4195  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4196  ELSE
4197  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4198  ENDIF
4199  CASE(distributed_matrix_vector_petsc_type)
4200  CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4201  CASE DEFAULT
4202  local_error="The distributed matrix library type of "// &
4203  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4204  CALL flagerror(local_error,err,error,*999)
4205  END SELECT
4206  ELSE
4207  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4208  ENDIF
4209  ELSE
4210  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4211  ENDIF
4212 
4213  exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG2")
4214  RETURN
4215 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error)
4216  RETURN 1
4217  END SUBROUTINE distributed_matrix_values_get_intg2
4218 
4219  !
4220  !================================================================================================================================
4221  !
4222 
4224  SUBROUTINE distributed_matrix_values_get_sp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4226  !Argument variables
4227  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4228  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4229  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4230  REAL(SP), INTENT(OUT) :: VALUES(:)
4231  INTEGER(INTG), INTENT(OUT) :: ERR
4232  TYPE(varying_string), INTENT(OUT) :: ERROR
4233  !Local Variables
4234  TYPE(varying_string) :: LOCAL_ERROR
4235 
4236  enters("DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error,*999)
4237 
4238  IF(ASSOCIATED(distributed_matrix)) THEN
4239  IF(distributed_matrix%MATRIX_FINISHED) THEN
4240  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4241  CASE(distributed_matrix_vector_cmiss_type)
4242  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4243  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4244  ELSE
4245  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4246  ENDIF
4247  CASE(distributed_matrix_vector_petsc_type)
4248  CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4249  CASE DEFAULT
4250  local_error="The distributed matrix library type of "// &
4251  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4252  CALL flagerror(local_error,err,error,*999)
4253  END SELECT
4254  ELSE
4255  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4256  ENDIF
4257  ELSE
4258  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4259  ENDIF
4260 
4261  exits("DISTRIBUTED_MATRIX_VALUES_GET_SP")
4262  RETURN
4263 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error)
4264  RETURN 1
4265  END SUBROUTINE distributed_matrix_values_get_sp
4266 
4267  !
4268  !================================================================================================================================
4269  !
4270 
4272  SUBROUTINE distributed_matrix_values_get_sp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4274  !Argument variables
4275  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4276  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4277  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4278  REAL(SP), INTENT(OUT) :: VALUE
4279  INTEGER(INTG), INTENT(OUT) :: ERR
4280  TYPE(varying_string), INTENT(OUT) :: ERROR
4281  !Local Variables
4282  TYPE(varying_string) :: LOCAL_ERROR
4283 
4284  enters("DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error,*999)
4285 
4286  IF(ASSOCIATED(distributed_matrix)) THEN
4287  IF(distributed_matrix%MATRIX_FINISHED) THEN
4288  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4289  CASE(distributed_matrix_vector_cmiss_type)
4290  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4291  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4292  ELSE
4293  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4294  ENDIF
4295  CASE(distributed_matrix_vector_petsc_type)
4296  CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4297  CASE DEFAULT
4298  local_error="The distributed matrix library type of "// &
4299  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4300  CALL flagerror(local_error,err,error,*999)
4301  END SELECT
4302  ELSE
4303  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4304  ENDIF
4305  ELSE
4306  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4307  ENDIF
4308 
4309  exits("DISTRIBUTED_MATRIX_VALUES_GET_SP1")
4310  RETURN
4311 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error)
4312  RETURN 1
4313  END SUBROUTINE distributed_matrix_values_get_sp1
4314 
4315  !
4316  !================================================================================================================================
4317  !
4318 
4320  SUBROUTINE distributed_matrix_values_get_sp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4322  !Argument variables
4323  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4324  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4325  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4326  REAL(SP), INTENT(OUT) :: VALUES(:,:)
4327  INTEGER(INTG), INTENT(OUT) :: ERR
4328  TYPE(varying_string), INTENT(OUT) :: ERROR
4329  !Local Variables
4330  TYPE(varying_string) :: LOCAL_ERROR
4331 
4332  enters("DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error,*999)
4333 
4334  IF(ASSOCIATED(distributed_matrix)) THEN
4335  IF(distributed_matrix%MATRIX_FINISHED) THEN
4336  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4337  CASE(distributed_matrix_vector_cmiss_type)
4338  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4339  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4340  ELSE
4341  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4342  ENDIF
4343  CASE(distributed_matrix_vector_petsc_type)
4344  CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4345  CASE DEFAULT
4346  local_error="The distributed matrix library type of "// &
4347  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4348  CALL flagerror(local_error,err,error,*999)
4349  END SELECT
4350  ELSE
4351  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4352  ENDIF
4353  ELSE
4354  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4355  ENDIF
4356 
4357  exits("DISTRIBUTED_MATRIX_VALUES_GET_SP2")
4358  RETURN
4359 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error)
4360  RETURN 1
4361  END SUBROUTINE distributed_matrix_values_get_sp2
4362 
4363  !
4364  !================================================================================================================================
4365  !
4366 
4368  SUBROUTINE distributed_matrix_values_get_dp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4370  !Argument variables
4371  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4372  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4373  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4374  REAL(DP), INTENT(OUT) :: VALUES(:)
4375  INTEGER(INTG), INTENT(OUT) :: ERR
4376  TYPE(varying_string), INTENT(OUT) :: ERROR
4377  !Local Variables
4378  INTEGER(INTG) :: i
4379  TYPE(varying_string) :: LOCAL_ERROR
4380 
4381  enters("DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error,*999)
4382 
4383  IF(ASSOCIATED(distributed_matrix)) THEN
4384  IF(distributed_matrix%MATRIX_FINISHED) THEN
4385  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4386  CASE(distributed_matrix_vector_cmiss_type)
4387  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4388  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4389  ELSE
4390  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4391  ENDIF
4392  CASE(distributed_matrix_vector_petsc_type)
4393  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
4394  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
4395  IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
4396  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
4397  DO i=1,SIZE(row_indices,1)
4398  CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
4399  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i), &
4400  & err,error,*999) !PETSc uses 0 based indices
4401  ENDDO !i
4402  ELSE
4403  DO i=1,SIZE(row_indices,1)
4404  CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
4405  & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),err,error,*999) !PETSc uses 0 based indices
4406  ENDDO !i
4407  ENDIF
4408  ELSE
4409  local_error="The size of the column indices array ("// &
4410  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
4411  & ") does not conform to the size of the values array ("// &
4412  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
4413  CALL flagerror(local_error,err,error,*999)
4414  ENDIF
4415  ELSE
4416  local_error="The size of the row indices array ("// &
4417  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
4418  & ") does not conform to the size of the values array ("// &
4419  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
4420  CALL flagerror(local_error,err,error,*999)
4421  ENDIF
4422  ELSE
4423  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
4424  ENDIF
4425  CASE DEFAULT
4426  local_error="The distributed matrix library type of "// &
4427  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4428  CALL flagerror(local_error,err,error,*999)
4429  END SELECT
4430  ELSE
4431  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4432  ENDIF
4433  ELSE
4434  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4435  ENDIF
4436 
4437  exits("DISTRIBUTED_MATRIX_VALUES_GET_DP")
4438  RETURN
4439 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error)
4440  RETURN 1
4441  END SUBROUTINE distributed_matrix_values_get_dp
4442 
4443  !
4444  !================================================================================================================================
4445  !
4446 
4448  SUBROUTINE distributed_matrix_values_get_dp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4450  !Argument variables
4451  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4452  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4453  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4454  REAL(DP), INTENT(OUT) :: VALUE
4455  INTEGER(INTG), INTENT(OUT) :: ERR
4456  TYPE(varying_string), INTENT(OUT) :: ERROR
4457  !Local Variables
4458  INTEGER(INTG) :: COLUMN_INDICES(1)
4459  REAL(DP) :: VALUES(1)
4460  TYPE(varying_string) :: LOCAL_ERROR
4461 
4462  enters("DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error,*999)
4463 
4464  IF(ASSOCIATED(distributed_matrix)) THEN
4465  IF(distributed_matrix%MATRIX_FINISHED) THEN
4466  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4467  CASE(distributed_matrix_vector_cmiss_type)
4468  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4469  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4470  ELSE
4471  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4472  ENDIF
4473  CASE(distributed_matrix_vector_petsc_type)
4474  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
4475  column_indices(1)=column_index-1
4476  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
4477  CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
4478  & global_row_numbers(row_index),1,column_indices,values,err,error,*999) !PETSc uses 0 based indices
4479  ELSE
4480  CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_index), &
4481  & 1,column_indices,values,err,error,*999) !PETSc uses 0 based indices
4482  ENDIF
4483  VALUE=values(1)
4484  ELSE
4485  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
4486  ENDIF
4487  CASE DEFAULT
4488  local_error="The distributed matrix library type of "// &
4489  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4490  CALL flagerror(local_error,err,error,*999)
4491  END SELECT
4492  ELSE
4493  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4494  ENDIF
4495  ELSE
4496  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4497  ENDIF
4498 
4499  exits("DISTRIBUTED_MATRIX_VALUES_GET_DP1")
4500  RETURN
4501 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error)
4502  RETURN 1
4503  END SUBROUTINE distributed_matrix_values_get_dp1
4504 
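The single-entry getter takes 1-based user indices: as the PETSc branch above shows, the row is mapped through GLOBAL_ROW_NUMBERS and the column is shifted down by one for PETSc's 0-based numbering, so callers never perform that conversion themselves. A usage sketch, where stiffnessMatrix, err and error are placeholder variables:

  REAL(DP) :: kij

  !Read the (3,7) entry; the 0-based PETSc conversion happens inside the getter
  CALL distributed_matrix_values_get_dp1(stiffnessMatrix,3,7,kij,err,error,*999)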
4505  !
4506  !
4507  !================================================================================================================================
4508  !
4509 
4510  !>Gets a matrix of values in a distributed double precision matrix.
4511  SUBROUTINE distributed_matrix_values_get_dp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4513  !Argument variables
4514  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4515  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4516  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4517  REAL(DP), INTENT(OUT) :: VALUES(:,:)
4518  INTEGER(INTG), INTENT(OUT) :: ERR
4519  TYPE(varying_string), INTENT(OUT) :: ERROR
4520  !Local Variables
4521  INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices,1)),i
4522  TYPE(varying_string) :: LOCAL_ERROR
4523 
4524  enters("DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error,*999)
4525 
4526  IF(ASSOCIATED(distributed_matrix)) THEN
4527  IF(distributed_matrix%MATRIX_FINISHED) THEN
4528  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4529  CASE(distributed_matrix_vector_cmiss_type)
4530  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4531  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4532  ELSE
4533  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4534  ENDIF
4535  CASE(distributed_matrix_vector_petsc_type)
4537  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
4538  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
4539  IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
4540  DO i=1,SIZE(row_indices,1)
4541  global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
4542  ENDDO !i
4543  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
4544  CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
4545  & SIZE(column_indices,1),column_indices-1,values,err,error,*999) !PETSc uses 0 based row indices
4546  ELSE
4547  CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
4548  & SIZE(column_indices,1),column_indices-1,values,err,error,*999) !PETSc uses 0 based row indices
4549  ENDIF
4550  ELSE
4551  local_error="The size of the column indices array ("// &
4552  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
4553  & ") does not conform to the number of columns in the values array ("// &
4554  & trim(numbertovstring(SIZE(values,2),"*",err,error))//")."
4555  CALL flagerror(local_error,err,error,*999)
4556  ENDIF
4557  ELSE
4558  local_error="The size of the row indices array ("// &
4559  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
4560  & ") does not conform to the number of rows in the values array ("// &
4561  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
4562  CALL flagerror(local_error,err,error,*999)
4563  ENDIF
4564  ELSE
4565  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
4566  ENDIF
4567  CASE DEFAULT
4568  local_error="The distributed matrix library type of "// &
4569  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4570  CALL flagerror(local_error,err,error,*999)
4571  END SELECT
4572  ELSE
4573  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4574  ENDIF
4575  ELSE
4576  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4577  ENDIF
4578 
4579  exits("DISTRIBUTED_MATRIX_VALUES_GET_DP2")
4580  RETURN
4581 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error)
4582  RETURN 1
4583  END SUBROUTINE distributed_matrix_values_get_dp2
4584 
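Unlike the rank-1 getter above it, which reads one (row,column) pair per loop iteration, this rank-2 variant fetches the whole SIZE(ROW_INDICES) by SIZE(COLUMN_INDICES) block in a single petsc_matgetvalues call, which is why VALUES must conform in both dimensions. A sketch with placeholder names:

  INTEGER(INTG) :: rows(2),cols(3)
  REAL(DP) :: block(2,3)

  rows=(/1,2/)
  cols=(/4,5,6/)
  !block(i,j) receives the entry at (rows(i),cols(j)); block must be 2x3 here
  CALL distributed_matrix_values_get_dp2(aMatrix,rows,cols,block,err,error,*999)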
4585  !
4586  !================================================================================================================================
4587  !
4588 
4589  !>Gets values in a distributed logical matrix.
4590  SUBROUTINE distributed_matrix_values_get_l(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4592  !Argument variables
4593  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4594  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4595  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4596  LOGICAL, INTENT(OUT) :: VALUES(:)
4597  INTEGER(INTG), INTENT(OUT) :: ERR
4598  TYPE(varying_string), INTENT(OUT) :: ERROR
4599  !Local Variables
4600  TYPE(varying_string) :: LOCAL_ERROR
4601 
4602  enters("DISTRIBUTED_MATRIX_VALUES_GET_L",err,error,*999)
4603 
4604  IF(ASSOCIATED(distributed_matrix)) THEN
4605  IF(distributed_matrix%MATRIX_FINISHED) THEN
4606  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4607  CASE(distributed_matrix_vector_cmiss_type)
4608  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4609  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4610  ELSE
4611  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4612  ENDIF
4613  CASE(distributed_matrix_vector_petsc_type)
4614  CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4615  CASE DEFAULT
4616  local_error="The distributed matrix library type of "// &
4617  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4618  CALL flagerror(local_error,err,error,*999)
4619  END SELECT
4620  ELSE
4621  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4622  ENDIF
4623  ELSE
4624  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4625  ENDIF
4626 
4627  exits("DISTRIBUTED_MATRIX_VALUES_GET_L")
4628  RETURN
4629 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L",err,error)
4630  RETURN 1
4631  END SUBROUTINE distributed_matrix_values_get_l
4632 
4633  !
4634  !================================================================================================================================
4635  !
4636 
4637  !>Gets one value in a distributed logical matrix.
4638  SUBROUTINE distributed_matrix_values_get_l1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4640  !Argument variables
4641  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4642  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4643  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4644  LOGICAL, INTENT(OUT) :: VALUE
4645  INTEGER(INTG), INTENT(OUT) :: ERR
4646  TYPE(varying_string), INTENT(OUT) :: ERROR
4647  !Local Variables
4648  TYPE(varying_string) :: LOCAL_ERROR
4649 
4650  enters("DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error,*999)
4651 
4652  IF(ASSOCIATED(distributed_matrix)) THEN
4653  IF(distributed_matrix%MATRIX_FINISHED) THEN
4654  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4655  CASE(distributed_matrix_vector_cmiss_type)
4656  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4657  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4658  ELSE
4659  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4660  ENDIF
4661  CASE(distributed_matrix_vector_petsc_type)
4662  CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4663  CASE DEFAULT
4664  local_error="The distributed matrix library type of "// &
4665  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4666  CALL flagerror(local_error,err,error,*999)
4667  END SELECT
4668  ELSE
4669  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4670  ENDIF
4671  ELSE
4672  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4673  ENDIF
4674 
4675  exits("DISTRIBUTED_MATRIX_VALUES_GET_L1")
4676  RETURN
4677 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error)
4678  RETURN 1
4679  END SUBROUTINE distributed_matrix_values_get_l1
4680 
4681  !
4682  !================================================================================================================================
4683  !
4684 
4685  !>Gets a matrix of values in a distributed logical matrix.
4686  SUBROUTINE distributed_matrix_values_get_l2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4688  !Argument variables
4689  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4690  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4691  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4692  LOGICAL, INTENT(OUT) :: VALUES(:,:)
4693  INTEGER(INTG), INTENT(OUT) :: ERR
4694  TYPE(varying_string), INTENT(OUT) :: ERROR
4695  !Local Variables
4696  TYPE(varying_string) :: LOCAL_ERROR
4697 
4698  enters("DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error,*999)
4699 
4700  IF(ASSOCIATED(distributed_matrix)) THEN
4701  IF(distributed_matrix%MATRIX_FINISHED) THEN
4702  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4703  CASE(distributed_matrix_vector_cmiss_type)
4704  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4705  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4706  ELSE
4707  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4708  ENDIF
4709  CASE(distributed_matrix_vector_petsc_type)
4710  CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4711  CASE DEFAULT
4712  local_error="The distributed matrix library type of "// &
4713  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4714  CALL flagerror(local_error,err,error,*999)
4715  END SELECT
4716  ELSE
4717  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4718  ENDIF
4719  ELSE
4720  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4721  ENDIF
4722 
4723  exits("DISTRIBUTED_MATRIX_VALUES_GET_L2")
4724  RETURN
4725 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error)
4726  RETURN 1
4727  END SUBROUTINE distributed_matrix_values_get_l2
4728 
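The pattern across the logical getters above, and the integer and single precision setters below, is that only the CMISS backend is supported: this module only creates double precision PETSc matrices, so the PETSc case for every other data type simply flags an error. A sketch of the rank-1 logical getter, which reads entries pairwise, assuming a matrix created with the CMISS library type (placeholder names):

  LOGICAL :: flags(2)

  !Reads entries (1,4) and (2,4), i.e. flags(i)=M(rows(i),cols(i));
  !valid only for a matrix using distributed_matrix_vector_cmiss_type
  CALL distributed_matrix_values_get_l(maskMatrix,(/1,2/),(/4,4/),flags,err,error,*999)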
4729  !
4730  !================================================================================================================================
4731  !
4732 
4733  !>Sets values in a distributed integer matrix.
4734  SUBROUTINE distributed_matrix_values_set_intg(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4736  !Argument variables
4737  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4738  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4739  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4740  INTEGER(INTG), INTENT(IN) :: VALUES(:)
4741  INTEGER(INTG), INTENT(OUT) :: ERR
4742  TYPE(varying_string), INTENT(OUT) :: ERROR
4743  !Local Variables
4744  TYPE(varying_string) :: LOCAL_ERROR
4745 
4746  enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error,*999)
4747 
4748  IF(ASSOCIATED(distributed_matrix)) THEN
4749  IF(distributed_matrix%MATRIX_FINISHED) THEN
4750  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4751  CASE(distributed_matrix_vector_cmiss_type)
4752  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4753  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4754  ELSE
4755  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4756  ENDIF
4757  CASE(distributed_matrix_vector_petsc_type)
4758  CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
4759  CASE DEFAULT
4760  local_error="The distributed matrix library type of "// &
4761  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4762  CALL flagerror(local_error,err,error,*999)
4763  END SELECT
4764  ELSE
4765  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4766  ENDIF
4767  ELSE
4768  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4769  ENDIF
4770 
4771  exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG")
4772  RETURN
4773 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error)
4774  RETURN 1
4775  END SUBROUTINE distributed_matrix_values_set_intg
4776 
4777  !
4778  !================================================================================================================================
4779  !
4780 
4781  !>Sets one value in a distributed integer matrix.
4782  SUBROUTINE distributed_matrix_values_set_intg1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4784  !Argument variables
4785  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4786  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4787  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4788  INTEGER(INTG), INTENT(IN) :: VALUE
4789  INTEGER(INTG), INTENT(OUT) :: ERR
4790  TYPE(varying_string), INTENT(OUT) :: ERROR
4791  !Local Variables
4792  TYPE(varying_string) :: LOCAL_ERROR
4793 
4794  enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error,*999)
4795 
4796  IF(ASSOCIATED(distributed_matrix)) THEN
4797  IF(distributed_matrix%MATRIX_FINISHED) THEN
4798  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4799  CASE(distributed_matrix_vector_cmiss_type)
4800  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4801  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4802  ELSE
4803  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4804  ENDIF
4805  CASE(distributed_matrix_vector_petsc_type)
4806  CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
4807  CASE DEFAULT
4808  local_error="The distributed matrix library type of "// &
4809  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4810  CALL flagerror(local_error,err,error,*999)
4811  END SELECT
4812  ELSE
4813  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4814  ENDIF
4815  ELSE
4816  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4817  ENDIF
4818 
4819  exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG1")
4820  RETURN
4821 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error)
4822  RETURN 1
4823  END SUBROUTINE distributed_matrix_values_set_intg1
4824 
4825  !
4826  !================================================================================================================================
4827  !
4828 
4829  !>Sets a matrix of values in a distributed integer matrix.
4830  SUBROUTINE distributed_matrix_values_set_intg2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4832  !Argument variables
4833  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4834  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4835  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4836  INTEGER(INTG), INTENT(IN) :: VALUES(:,:)
4837  INTEGER(INTG), INTENT(OUT) :: ERR
4838  TYPE(varying_string), INTENT(OUT) :: ERROR
4839  !Local Variables
4840  TYPE(varying_string) :: LOCAL_ERROR
4841 
4842  enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error,*999)
4843 
4844  IF(ASSOCIATED(distributed_matrix)) THEN
4845  IF(distributed_matrix%MATRIX_FINISHED) THEN
4846  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4847  CASE(distributed_matrix_vector_cmiss_type)
4848  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4849  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4850  ELSE
4851  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4852  ENDIF
4853  CASE(distributed_matrix_vector_petsc_type)
4854  CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
4855  CASE DEFAULT
4856  local_error="The distributed matrix library type of "// &
4857  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4858  CALL flagerror(local_error,err,error,*999)
4859  END SELECT
4860  ELSE
4861  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4862  ENDIF
4863  ELSE
4864  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4865  ENDIF
4866 
4867  exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG2")
4868  RETURN
4869 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error)
4870  RETURN 1
4871  END SUBROUTINE distributed_matrix_values_set_intg2
4872 
4873  !
4874  !================================================================================================================================
4875  !
4876 
4877  !>Sets values in a distributed single precision matrix.
4878  SUBROUTINE distributed_matrix_values_set_sp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4880  !Argument variables
4881  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4882  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4883  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4884  REAL(SP), INTENT(IN) :: VALUES(:)
4885  INTEGER(INTG), INTENT(OUT) :: ERR
4886  TYPE(varying_string), INTENT(OUT) :: ERROR
4887  !Local Variables
4888  TYPE(varying_string) :: LOCAL_ERROR
4889 
4890  enters("DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error,*999)
4891 
4892  IF(ASSOCIATED(distributed_matrix)) THEN
4893  IF(distributed_matrix%MATRIX_FINISHED) THEN
4894  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4895  CASE(distributed_matrix_vector_cmiss_type)
4896  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4897  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4898  ELSE
4899  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4900  ENDIF
4901  CASE(distributed_matrix_vector_petsc_type)
4902  CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
4903  CASE DEFAULT
4904  local_error="The distributed matrix library type of "// &
4905  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4906  CALL flagerror(local_error,err,error,*999)
4907  END SELECT
4908  ELSE
4909  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4910  ENDIF
4911  ELSE
4912  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4913  ENDIF
4914 
4915  exits("DISTRIBUTED_MATRIX_VALUES_SET_SP")
4916  RETURN
4917 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error)
4918  RETURN 1
4919  END SUBROUTINE distributed_matrix_values_set_sp
4920 
4921  !
4922  !================================================================================================================================
4923  !
4924 
4925  !>Sets one value in a distributed single precision matrix.
4926  SUBROUTINE distributed_matrix_values_set_sp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4928  !Argument variables
4929  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4930  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4931  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4932  REAL(SP), INTENT(IN) :: VALUE
4933  INTEGER(INTG), INTENT(OUT) :: ERR
4934  TYPE(varying_string), INTENT(OUT) :: ERROR
4935  !Local Variables
4936  TYPE(varying_string) :: LOCAL_ERROR
4937 
4938  enters("DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error,*999)
4939 
4940  IF(ASSOCIATED(distributed_matrix)) THEN
4941  IF(distributed_matrix%MATRIX_FINISHED) THEN
4942  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4943  CASE(distributed_matrix_vector_cmiss_type)
4944  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4945  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4946  ELSE
4947  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4948  ENDIF
4949  CASE(distributed_matrix_vector_petsc_type)
4950  CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
4951  CASE DEFAULT
4952  local_error="The distributed matrix library type of "// &
4953  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4954  CALL flagerror(local_error,err,error,*999)
4955  END SELECT
4956  ELSE
4957  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4958  ENDIF
4959  ELSE
4960  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4961  ENDIF
4962 
4963  exits("DISTRIBUTED_MATRIX_VALUES_SET_SP1")
4964  RETURN
4965 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error)
4966  RETURN 1
4967  END SUBROUTINE distributed_matrix_values_set_sp1
4968 
4969  !
4970  !================================================================================================================================
4971  !
4972 
4973  !>Sets a matrix of values in a distributed single precision matrix.
4974  SUBROUTINE distributed_matrix_values_set_sp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4976  !Argument variables
4977  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4978  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4979  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4980  REAL(SP), INTENT(IN) :: VALUES(:,:)
4981  INTEGER(INTG), INTENT(OUT) :: ERR
4982  TYPE(varying_string), INTENT(OUT) :: ERROR
4983  !Local Variables
4984  TYPE(varying_string) :: LOCAL_ERROR
4985 
4986  enters("DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error,*999)
4987 
4988  IF(ASSOCIATED(distributed_matrix)) THEN
4989  IF(distributed_matrix%MATRIX_FINISHED) THEN
4990  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4991  CASE(distributed_matrix_vector_cmiss_type)
4992  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4993  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4994  ELSE
4995  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4996  ENDIF
4997  CASE(distributed_matrix_vector_petsc_type)
4998  CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
4999  CASE DEFAULT
5000  local_error="The distributed matrix library type of "// &
5001  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5002  CALL flagerror(local_error,err,error,*999)
5003  END SELECT
5004  ELSE
5005  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5006  ENDIF
5007  ELSE
5008  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5009  ENDIF
5010 
5011  exits("DISTRIBUTED_MATRIX_VALUES_SET_SP2")
5012  RETURN
5013 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error)
5014  RETURN 1
5015  END SUBROUTINE distributed_matrix_values_set_sp2
5016 
5017  !
5018  !================================================================================================================================
5019  !
5020 
5021  !>Sets values in a distributed double precision matrix.
5022  SUBROUTINE distributed_matrix_values_set_dp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
5024  !Argument variables
5025  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5026  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
5027  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
5028  REAL(DP), INTENT(IN) :: VALUES(:)
5029  INTEGER(INTG), INTENT(OUT) :: ERR
5030  TYPE(varying_string), INTENT(OUT) :: ERROR
5031  !Local Variables
5032  INTEGER(INTG) :: i
5033  TYPE(varying_string) :: LOCAL_ERROR
5034 
5035  enters("DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error,*999)
5036 
5037  IF(ASSOCIATED(distributed_matrix)) THEN
5038  IF(distributed_matrix%MATRIX_FINISHED) THEN
5039  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5040  CASE(distributed_matrix_vector_cmiss_type)
5041  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
5042  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5043  ELSE
5044  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5045  ENDIF
5046  CASE(distributed_matrix_vector_petsc_type)
5047  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
5048  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
5049  IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
5050  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
5051  DO i=1,SIZE(row_indices,1)
5052  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
5053  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values, &
5054  & err,error,*999) !0 based indices
5055  ENDDO !i
5056  ELSE
5057  DO i=1,SIZE(row_indices,1)
5058  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
5059  & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values,err,error,*999) !0 based indices
5060  ENDDO !i
5061  ENDIF
5062  ELSE
5063  local_error="The size of the column indices array ("// &
5064  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
5065  & ") does not conform to the size of the values array ("// &
5066  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
5067  CALL flagerror(local_error,err,error,*999)
5068  ENDIF
5069  ELSE
5070  local_error="The size of the row indices array ("// &
5071  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
5072  & ") does not conform to the size of the values array ("// &
5073  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
5074  CALL flagerror(local_error,err,error,*999)
5075  ENDIF
5076  ELSE
5077  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
5078  ENDIF
5079  CASE DEFAULT
5080  local_error="The distributed matrix library type of "// &
5081  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5082  CALL flagerror(local_error,err,error,*999)
5083  END SELECT
5084  ELSE
5085  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5086  ENDIF
5087  ELSE
5088  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5089  ENDIF
5090 
5091  exits("DISTRIBUTED_MATRIX_VALUES_SET_DP")
5092  RETURN
5093 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error)
5094  RETURN 1
5095  END SUBROUTINE distributed_matrix_values_set_dp
5096 
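This double precision setter is the one setter with a full PETSc path. It writes entries pairwise, one (row,column) pair per petsc_matsetvalues call, and passes petsc_insert_values, so existing PETSc entries are overwritten rather than accumulated; accumulation is the job of the module's separate values-add routines. A sketch with placeholder names:

  INTEGER(INTG) :: rows(3),cols(3)
  REAL(DP) :: vals(3)

  rows=(/1,2,3/)
  cols=(/1,2,3/)
  vals=(/1.0_DP,2.0_DP,3.0_DP/)
  !Sets M(rows(i),cols(i))=vals(i) for i=1..3; with PETSc this inserts
  !(overwrites) rather than adds, because petsc_insert_values is used
  CALL distributed_matrix_values_set_dp(aMatrix,rows,cols,vals,err,error,*999)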
5097  !
5098  !================================================================================================================================
5099  !
5100 
5101  !>Sets one value in a distributed double precision matrix.
5102  SUBROUTINE distributed_matrix_values_set_dp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
5104  !Argument variables
5105  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5106  INTEGER(INTG)